/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "ggc.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "tree-gimple.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the required
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
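/* For example, with ALIGN == 16 (a power of two), CEIL_ROUND (37, 16)
   yields 48 and FLOOR_ROUND (-37, 16) yields -48.  Both are computed with
   masking alone, so a negative VALUE still rounds to a correct multiple
   without relying on the behavior of negative division.  */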
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
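/* For example, in GNU C a statement expression such as

     struct S s = ({ struct S tmp = f (); tmp; });

   may leave its result in a temporary slot; that slot must then survive
   past the end of the inner statement, which is achieved by pretending
   it belongs to the enclosing nesting level.  */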
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (rtx, VEC(int,heap) **);
static void emit_return_into_block (basic_block, rtx);
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
  struct function *p;

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
  if (FRAME_GROWS_DOWNWARD)
    return -f->x_frame_offset;
  else
    return f->x_frame_offset;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    function->x_frame_offset -= size;

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
        function->x_frame_offset
          = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                          (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
      else
        function->x_frame_offset
          = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                         (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    function->x_frame_offset += size;

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
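/* Illustrative use, not taken from this file: a caller wanting a
   16-byte BLKmode scratch area aligned to BIGGEST_ALIGNMENT could write

     rtx slot = assign_stack_local (BLKmode, 16, -1);

   whereas passing ALIGN == 0 would derive the alignment from the mode
   instead, per the rules documented above assign_stack_local_1.  */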
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = avail_temp_slots; p; p = p->next)
    {
      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
          && objects_must_conflict_p (p->type, type)
          && (best_p == 0 || best_p->size > p->size
              || (best_p->size == p->size && best_p->align > p->align)))
        {
          if (p->align == align && p->size == size)
            {
              selected = p;
              cut_slot_from_list (selected, &avail_temp_slots);
              best_p = 0;
              break;
            }
          best_p = p;
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size,
                                                   (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      tree size_tree;
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* If we still haven't been able to get a size, see if the language
         can compute a maximum size.  */
      if (size == -1
          && (size_tree = lang_hooks.types.max_size (type)) != 0
          && host_integerp (size_tree, 1))
        size = tree_low_cst (size_tree, 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("%Jsize of variable %qD is too large", decl, decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
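/* Illustrative behavior of assign_temp: for a scalar type with
   MEMORY_REQUIRED == 0 it simply returns a fresh pseudo from
   gen_reg_rtx, while a BLKmode type (a returned struct, say) always
   receives an addressable stack slot via assign_stack_temp_for_type.  */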
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
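/* Worked example for the adjacency test above: if slot P has
   base_offset 0 and full_size 16 while slot Q has base_offset 16, then
   P->base_offset + P->full_size == Q->base_offset, so Q is merged into
   P and P->full_size grows by Q->full_size to cover both regions.  */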
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location.
     If so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.  Kept slots need not
   be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
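/* Illustrative pairing, a sketch of how callers typically use these
   routines rather than code from this file:

     push_temp_slots ();
     ... expand a statement that may allocate temporaries ...
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   Temporaries created inside the bracketed region die when the level
   is popped unless explicitly preserved or marked "kept".  */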
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif

/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    new = arg_pointer_rtx, offset = cfa_offset;
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
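/* For example, once var_offset is known, an address such as
   (plus (reg virtual_stack_vars) (const_int 8)) is rewritten by the
   code below into (plus (reg frame_pointer) (const_int (8 + var_offset))),
   folding the register substitution and its offset into one constant.  */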
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
        {
          *loc = plus_constant (new, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
        {
          new = plus_constant (new, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new);
          if (x != new)
            emit_move_insn (new, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && GET_CODE (recog_data.operand[2]) == CONST_INT
          && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new = instantiate_new_reg (x, &offset);
          if (new == NULL)
            continue;
          if (offset == 0)
            x = new;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is a valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new = expand_simple_binop (GET_MODE (new), PLUS, new,
                                         GEN_INT (offset), NULL_RTX,
                                         1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new,
                                   GET_MODE (new), SUBREG_BYTE (x));
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        x = force_reg (insn_data[insn_code].operand[i].mode, x);

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = recog_data.operand[(unsigned)recog_data.dup_num[i]];

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl (XEXP (x, 0));
      instantiate_decl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      instantiate_decl (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl (DECL_RTL (decl));
      instantiate_decl (DECL_INCOMING_RTL (decl));
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;

        instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (tree exp, tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        fntype = get_callee_fndecl (fntype);
        fntype = fntype ? TREE_TYPE (fntype) : 0;
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = 0;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;
  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;
  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (tree decl)
{
  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  return (optimize || DECL_REGISTER (decl));
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;

  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
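/* Concrete case for the size test above: a type whose TYPE_SIZE is not
   a compile-time INTEGER_CST (for example, a variable-sized record in a
   front end that supports them) is always passed by invisible reference,
   regardless of what the target hook would otherwise decide.  */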
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx conversion_insns;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static tree
split_complex_args (tree args)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return args;

 found:
  args = copy_list (args);

  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);

          /* Build a second synthetic decl.  */
          decl = build_decl (PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);

          /* Splice it in; skip the new decl.  */
          TREE_CHAIN (decl) = TREE_CHAIN (p);
          TREE_CHAIN (p) = decl;
          p = decl;
        }
    }

  return args;
}
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static tree
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! current_function_returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = fnargs;
      fnargs = decl;
      all->function_result_decl = decl;
    }

  all->orig_fnargs = fnargs;

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!current_function_stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (TREE_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union, use the type of
     the first field for the tests below.  We have already verified that
     the modes are the same.  */
  if (DECL_TRANSPARENT_UNION (parm)
      || (TREE_CODE (passed_type) == UNION_TYPE
          && TYPE_TRANSPARENT_UNION (passed_type)))
    passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  promoted_mode = passed_mode;
  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
    {
      int unsignedp = TYPE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode,
                                    &unsignedp, 1);
    }

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

#ifdef FUNCTION_INCOMING_ARG
  entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
				      data->passed_type, data->named_arg);
#else
  entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
			     data->passed_type, data->named_arg);
#endif

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
	{
	  rtx tem;
#ifdef FUNCTION_INCOMING_ARG
	  tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
				       data->passed_type, true);
#else
	  tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
			      data->passed_type, true);
#endif
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
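
/* Worked example (an illustrative sketch, not part of GCC): the rounding
   of PRETEND_BYTES above.  Assume STACK_BYTES is 16 and a partial argument
   left 4 bytes in registers; the prologue must still extend the stack by a
   full stack-boundary multiple.  The EXAMPLE_ names are hypothetical
   stand-ins for the real macros.  */
#if 0
#include <assert.h>
#define EXAMPLE_CEIL_ROUND(v, a) (((v) + (a) - 1) & ~((a) - 1))
static void
example_pretend_rounding (void)
{
  int pretend_bytes = 4;                            /* register part */
  int pretend_args_size = EXAMPLE_CEIL_ROUND (pretend_bytes, 16);
  assert (pretend_args_size == 16);                 /* boundary multiple */
}
#endif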
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = current_function_internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  set_mem_attributes (stack_parm, parm, 1);

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (GET_CODE (offset_rtx) == CONST_INT)
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
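
/* Worked example (an illustrative sketch, not part of GCC): the alignment
   guess above.  OR-ing the byte offset (converted to bits) with BOUNDARY
   and then isolating the lowest set bit with ALIGN & -ALIGN yields the
   largest power of two dividing both; the numbers here are examples.  */
#if 0
#include <assert.h>
static void
example_align_guess (void)
{
  unsigned int boundary = 64;                  /* slot boundary, in bits */
  unsigned int align = 4 * 8 | boundary;       /* offset of 4 bytes */
  align = align & -align;                      /* lowest set bit */
  assert (align == 32);                        /* 4-byte alignment */
}
#endif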
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (stack_parm), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_CODE (data->entry_parm) == PARALLEL)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;
  rtx orig_entry_parm = entry_parm;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we've a non-block object that's nevertheless passed in parts,
     reconstitute it in register operations rather than on the stack.  */
  if (GET_CODE (entry_parm) == PARALLEL
      && data->nominal_mode != BLKmode)
    {
      rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);

      if ((XVECLEN (entry_parm, 0) > 1
	   || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
	  && use_register_for_decl (parm))
	{
	  rtx parmreg = gen_reg_rtx (data->nominal_mode);

	  push_to_sequence (all->conversion_insns);

	  /* For values returned in multiple registers, handle possible
	     incompatible calls to emit_group_store.

	     For example, the following would be invalid, and would have to
	     be fixed by the conditional below:

	     emit_group_store ((reg:SF), (parallel:DF))
	     emit_group_store ((reg:SI), (parallel:DI))

	     An example of this are doubles in e500 v2:
	     (parallel:DF (expr_list (reg:SI) (const_int 0))
	     (expr_list (reg:SI) (const_int 4))).  */
	  if (data->nominal_mode != data->passed_mode)
	    {
	      rtx t = gen_reg_rtx (GET_MODE (entry_parm));
	      emit_group_store (t, entry_parm, NULL_TREE,
				GET_MODE_SIZE (GET_MODE (entry_parm)));
	      convert_move (parmreg, t, 0);
	    }
	  else
	    emit_group_store (parmreg, entry_parm, data->nominal_type,
			      int_size_in_bytes (data->nominal_type));

	  all->conversion_insns = get_insns ();
	  end_sequence ();

	  SET_DECL_RTL (parm, parmreg);
	  return;
	}
    }

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence (all->conversion_insns);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->conversion_insns = get_insns ();
	  end_sequence ();
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  enum machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 0)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg,
				build_int_cst (NULL_TREE, by),
				NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence (all->conversion_insns);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->conversion_insns = get_insns ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
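
/* Worked example (an illustrative sketch, not part of GCC): the
   left-justification above for small BLKmode values on big-endian
   targets.  A 3-byte value sitting right-justified in a 4-byte register
   must be shifted left so its bytes land at the start of the stack word.
   The concrete sizes below are examples only.  */
#if 0
#include <assert.h>
#include <stdint.h>
static void
example_big_endian_shift (void)
{
  int units_per_word = 4, size = 3;
  int by = (units_per_word - size) * 8;           /* shift count: 8 bits */
  uint32_t reg = 0x00ABCDEFu;                     /* value right-justified */
  assert ((reg << by) == 0xABCDEF00u);            /* now left-justified */
}
#endif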
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  */

  promoted_nominal_mode
    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  /* Copy the value into the register.  */
  if (data->nominal_mode != data->passed_mode
      || promoted_nominal_mode != data->promoted_mode)
    {
      int save_tree_used;

      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.  Therefore, we must first copy the parm to
	 a pseudo reg here, and save the conversion until after all
	 parameters have been moved.  */

      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence (all->conversion_insns);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg));
      TREE_USED (parm) = save_tree_used;
      all->conversion_insns = get_insns ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validize_mem (data->entry_parm));

  /* If we were passed a pointer but the actual value can safely live
     in a register, put it in one.  */
  if (data->passed_pointer
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
      /* If by-reference argument was promoted, demote it.  */
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
	  || use_register_for_decl (parm)))
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
      mark_user_reg (parmreg);

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence (all->conversion_insns);
	  emit_move_insn (tempreg, DECL_RTL (parm));
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (parmreg, tempreg);
	  all->conversion_insns = get_insns ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  enum machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		REG_NOTES (sinsn)
		  = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
				       REG_NOTES (sinsn));
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		REG_NOTES (sinsn)
		  = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
				       REG_NOTES (sinsn));
	    }
	}
      else if ((set = single_set (linsn)) != 0
	       && SET_DEST (set) == parmreg)
	REG_NOTES (linsn)
	  = gen_rtx_EXPR_LIST (REG_EQUIV,
			       data->stack_parm, REG_NOTES (linsn));
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
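
/* Illustrative sketch (not part of GCC's public API, just the deferral
   pattern already visible above): conversions are built in a detached
   sequence and spliced in later, so a conversion that makes a libcall
   cannot clobber parameter registers that have not been copied out yet.  */
#if 0
  push_to_sequence (all->conversion_insns);   /* reopen pending sequence */
  /* ... emit conversion insns here ...  */
  all->conversion_insns = get_insns ();       /* save; emitted after all
						 parms, by assign_parms */
  end_sequence ();
#endif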
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence (all->conversion_insns);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	/* ??? This may need a big-endian conversion on sparc64.  */
	data->stack_parm
	  = adjust_address (data->stack_parm, data->nominal_mode, 0);
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  TYPE_ALIGN (data->passed_type));
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence (all->conversion_insns);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->conversion_insns = get_insns ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs);
	  imag = DECL_RTL (TREE_CHAIN (fnargs));
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size,
					TYPE_ALIGN (TREE_TYPE (parm)));
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence (all->conversion_insns);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->conversion_insns = get_insns ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  SET_DECL_RTL (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs);
	  imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp);
	  fnargs = TREE_CHAIN (fnargs);
	}
      else
	{
	  SET_DECL_RTL (parm, DECL_RTL (fnargs));
	  set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));

	  /* Set MEM_EXPR to the original decl, i.e. to PARM,
	     instead of the copy of decl, i.e. FNARGS.  */
	  if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
	    set_mem_expr (DECL_INCOMING_RTL (parm), parm);
	}

      fnargs = TREE_CHAIN (fnargs);
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree fnargs, parm;
  rtx internal_arg_pointer;

  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      if (current_function_stdarg && !TREE_CHAIN (parm))
	assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}

      /* Record permanently how this parm was passed.  */
      set_decl_incoming_rtl (parm, data.entry_parm);

      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
			    data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
	assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
	assign_parm_setup_reg (&all, parm, &data);
      else
	assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
    assign_parms_unsplit_complex (&all, fnargs);

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.conversion_insns);

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	x = addr;
      else
	{
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}
      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  current_function_pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  current_function_args_size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum size.  */

#ifdef REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
				    REG_PARM_STACK_SPACE (fndecl));
#endif

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;

#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
				   size_int (-all.stack_args_size.constant)),
		      NULL_RTX, VOIDmode, 0));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
						 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  current_function_args_info = all.args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

#ifdef FUNCTION_OUTGOING_VALUE
	  real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
						   fndecl);
#else
	  real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
					  fndecl);
#endif
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that current_function_return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  current_function_return_rtx = real_decl_rtl;
	}
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (tree *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a list of
   statements to add to the beginning of the function, or NULL if nothing
   to do.  */

tree
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree fnargs, parm, stmts = NULL;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
			    data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (!TREE_CONSTANT (DECL_SIZE (parm)))
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CONSTANT (DECL_SIZE (parm)))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		}
	      else
		{
		  tree ptr_type, addr, args;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_var (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
		  t = built_in_decls[BUILT_IN_ALLOCA];
		  t = build_function_call_expr (t, args);
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      t = build2 (MODIFY_EXPR, void_type_node, local, parm);
	      gimplify_and_add (t, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  return stmts;
}
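
/* Illustrative sketch (not part of GCC): roughly what the loop above
   gimplifies for a variable-sized, callee-copied reference parameter,
   written as pseudo-C.  EXAMPLE_SIZE stands for DECL_SIZE_UNIT (parm);
   the names are hypothetical.  */
#if 0
  /* type *addr = (type *) __builtin_alloca (EXAMPLE_SIZE);  */
  /* *addr = parm;        -- copy the caller's object         */
  /* PARM's DECL_VALUE_EXPR becomes *addr, so every later use
     of PARM in the body refers to the local copy.            */
#endif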
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

rtx
promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (REG_P (DECL_INCOMING_RTL (arg))
	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
	int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));

	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
	    && mode != DECL_MODE (arg))
	  {
	    *pmode = DECL_MODE (arg);
	    *punsignedp = unsignedp;
	    return DECL_INCOMING_RTL (arg);
	  }
      }

  return 0;
}
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specific boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
		     int partial, tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  int boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
  locate->where_pad = where_pad;
  locate->boundary = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					  initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
	&& (!host_integerp (sizetree, 1)
	    || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
			  &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
			   - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
				   size_binop (MINUS_EXPR,
					       ssize_int (0),
					       initial_offset_ptr->var),
				   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
			  &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */
}
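
/* Worked example (an illustrative sketch, not part of GCC): the second
   rounding described above.  Assume a 5-byte BLKmode argument, a
   PARM_BOUNDARY of 32 bits, and a current offset of 4 bytes; all the
   numbers are examples only.  */
#if 0
#include <assert.h>
static void
example_locate_and_pad (void)
{
  int offset = 4;                       /* initial_offset_ptr->constant */
  int size = 5;                         /* raw argument size in bytes */
  int parm_bytes = 32 / 8;              /* PARM_BOUNDARY in bytes */

  /* The *size* is padded up to a PARM_BOUNDARY multiple...  */
  int padded = (size + parm_bytes - 1) / parm_bytes * parm_bytes;
  assert (padded == 8);

  /* ...so the next argument's slot begins PADDED bytes later.  */
  assert (offset + padded == 12);
}
#endif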
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* The sparc port has a bug.  It sometimes claims a STACK_BOUNDARY
     higher than the real alignment of %sp.  However, when it does this,
     the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
     This is a temporary hack while the sparc port is fixed.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
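
/* Worked example (an illustrative sketch, not part of GCC): the constant
   case above.  The offset is biased by SP_OFFSET, rounded, then unbiased,
   so it is the *effective* stack address that ends up aligned, not the
   raw offset.  Assume an 8-byte boundary and SP_OFFSET of 4.  */
#if 0
#include <assert.h>
#define EXAMPLE_CEIL_ROUND(v, a) (((v) + (a) - 1) & ~((a) - 1))
static void
example_pad_to_alignment (void)
{
  int sp_offset = 4, boundary_in_bytes = 8, offset = 6;
  offset = -sp_offset
	   + EXAMPLE_CEIL_ROUND (offset + sp_offset, boundary_in_bytes);
  assert (offset == 12);              /* 12 + 4 is a multiple of 8 */
}
#endif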
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
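
/* Worked example (an illustrative sketch, not part of GCC): the non-BLK
   branch above.  For a 2-byte value with PARM_BOUNDARY of 32 bits, the
   slot is 4 bytes and the value sits at its far end, so the offset
   advances past the 2 bytes of padding below it.  */
#if 0
#include <assert.h>
static void
example_pad_below (void)
{
  int bitsize = 16, parm_boundary = 32, unit = 8;
  int pad = (bitsize + parm_boundary - 1) / parm_boundary
	    * parm_boundary / unit
	    - bitsize / unit;
  assert (pad == 2);                  /* offset_ptr->constant += 2 */
}
#endif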
/* Walk the tree of blocks describing the binding levels within a function
   and warn about variables that might be killed by setjmp or vfork.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
setjmp_vars_warning (tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning (0, "%Jvariable %qD might be clobbered by %<longjmp%>"
		 " or %<vfork%>", decl, decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_vars_warning (sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning (void)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning (0, "%Jargument %qD might be clobbered by %<longjmp%> or %<vfork%>",
	       decl, decl);
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *block_stack;

  if (block == NULL_TREE)
    return;

  block_stack = VEC_alloc (tree, heap, 10);

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* Remove deleted blocks from the block fragment chains.  */
  reorder_fix_fragments (block);

  VEC_free (tree, heap, block_stack);
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);
		  tree origin;

		  origin = (BLOCK_FRAGMENT_ORIGIN (block)
			    ? BLOCK_FRAGMENT_ORIGIN (block)
			    : block);
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  BLOCK_SUPERCONTEXT (block) = current_block;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = block;
		}
	      VEC_safe_push (tree, heap, *p_block_stack, block);
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
	      BLOCK_SUBBLOCKS (current_block)
		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	    }
	}
    }
}
/* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
   appears in the block tree, select one of the fragments to become
   the new origin block.  */

static void
reorder_fix_fragments (tree block)
{
  while (block)
    {
      tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
      tree new_origin = NULL_TREE;

      if (dup_origin)
	{
	  if (! TREE_ASM_WRITTEN (dup_origin))
	    {
	      new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);

	      /* Find the first of the remaining fragments.  There must
		 be at least one -- the current block.  */
	      while (! TREE_ASM_WRITTEN (new_origin))
		new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
	      BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
	    }
	}
      else if (! dup_origin)
	new_origin = block;

      /* Re-root the rest of the fragments to the new origin.  In the
	 case that DUP_ORIGIN was null, that means BLOCK was the origin
	 of a chain of fragments and we want to remove those fragments
	 that didn't make it to the output.  */
      if (new_origin)
	{
	  tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
	  tree chain = *pp;

	  while (chain)
	    {
	      if (TREE_ASM_WRITTEN (chain))
		{
		  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
		  *pp = chain;
		  pp = &BLOCK_FRAGMENT_CHAIN (chain);
		}
	      chain = BLOCK_FRAGMENT_CHAIN (chain);
	    }
	  *pp = NULL_TREE;
	}

      reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
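
/* Illustrative sketch (not part of GCC): the same in-place chain reversal
   on a generic singly linked list.  Each iteration unhooks the head and
   pushes it onto PREV, so no allocation is needed.  */
#if 0
struct example_node { struct example_node *next; };

static struct example_node *
example_nreverse (struct example_node *t)
{
  struct example_node *prev = 0, *next;
  for (; t; t = next)
    {
      next = t->next;         /* remember the rest of the chain */
      t->next = prev;         /* point the head backwards */
      prev = t;               /* head becomes the new reversed prefix */
    }
  return prev;
}
#endif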
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = xmalloc (*n_blocks_p * sizeof (tree));
  all_blocks (block, block_vector);

  return block_vector;
}

static GTY(()) int next_block_index = 2;
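
/* Illustrative sketch (not part of GCC): the calling convention for
   get_block_vector -- the caller owns the vector and must free it, as
   number_blocks below does.  */
#if 0
  int n_blocks;
  tree *vec = get_block_vector (some_block, &n_blocks);
  int i;
  for (i = 0; i < n_blocks; i++)
    /* ... inspect vec[i] ... */ ;
  free (vec);
#endif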
/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  */

void
allocate_struct_function (tree fndecl)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared (sizeof (struct function));

  cfun->stack_alignment_needed = STACK_BOUNDARY;
  cfun->preferred_stack_boundary = STACK_BOUNDARY;

  current_function_funcdef_no = funcdef_no++;

  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

  init_eh_for_function ();

  lang_hooks.function.init (cfun);
  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

  if (fndecl == NULL)
    return;

  DECL_STRUCT_FUNCTION (fndecl) = cfun;
  cfun->decl = fndecl;

  result = DECL_RESULT (fndecl);
  if (aggregate_value_p (result, fndecl))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));

  current_function_stdarg
    = (fntype
       && TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   != void_type_node));

  /* Assume all registers in stdarg functions need to be saved.  */
  cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
  cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
}
/* Reset cfun, and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (tree fndecl)
{
  if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
    cfun = DECL_STRUCT_FUNCTION (fndecl);
  else
    allocate_struct_function (fndecl);
  init_varasm_status (cfun);

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */

void
init_dummy_function_start (void)
{
  prepare_function_start (NULL);
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  prepare_function_start (subr);

  /* Prevent ever trying to delete the first instruction of a
     function.  Also tell final how to output a linenum before the
     function prologue.  Note linenums could be missing, e.g. when
     compiling a Java .class file.  */
  if (! DECL_IS_BUILTIN (subr))
    emit_line_note (DECL_SOURCE_LOCATION (subr));

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Make sure all values used by the optimization passes have sane
   defaults.  */

void
init_function_for_compilation (void)
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  Make sure that these vectors are
     empty.  */
  gcc_assert (VEC_length (int, prologue) == 0);
  gcc_assert (VEC_length (int, epilogue) == 0);
  gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
}
void
expand_main_function (void)
{
#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
  if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      rtx tmp, seq;

      start_sequence ();
      /* Forcibly align the stack.  */
#ifdef STACK_GROWS_DOWNWARD
      tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
				 stack_pointer_rtx, 1, OPTAB_WIDEN);
#else
      tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
				 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
      tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
				 stack_pointer_rtx, 1, OPTAB_WIDEN);
#endif
      if (tmp != stack_pointer_rtx)
	emit_move_insn (stack_pointer_rtx, tmp);

      /* Enlist allocate_dynamic_stack_space to pick up the pieces.  */
      tmp = force_reg (Pmode, const0_rtx);
      allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
      seq = get_insns ();
      end_sequence ();

      for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
	if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
	  break;
      if (tmp)
	emit_insn_before (seq, tmp);
      else
	emit_insn (seq);
    }
#endif

#if (defined(INVOKE__main) \
     || (!defined(HAS_INIT_SECTION) \
	 && !defined(INIT_SECTION_ASM_OP) \
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
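
/* Worked example (an illustrative sketch, not part of GCC): the alignment
   arithmetic above.  With a downward-growing stack, masking with -ALIGN
   rounds the pointer down; otherwise it is bumped by ALIGN-1 first so the
   mask rounds up.  Assume a 16-byte preferred boundary.  */
#if 0
#include <assert.h>
#include <stdint.h>
static void
example_force_alignment (void)
{
  uintptr_t sp = 0x1004, align = 16;
  assert ((sp & -align) == 0x1000);                  /* grows downward */
  assert (((sp + align - 1) & -align) == 0x1010);    /* grows upward */
}
#endif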
3924 /* Start the RTL for a new function, and set variables used for
3926 SUBR is the FUNCTION_DECL node.
3927 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3928 the function's parameters, which must be run at any return statement. */
3931 expand_function_start (tree subr
)
3933 /* Make sure volatile mem refs aren't considered
3934 valid operands of arithmetic insns. */
3935 init_recog_no_volatile ();
3937 current_function_profile
3939 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr
));
3941 current_function_limit_stack
3942 = (stack_limit_rtx
!= NULL_RTX
&& ! DECL_NO_LIMIT_STACK (subr
));
3944 /* Make the label for return statements to jump to. Do not special
3945 case machines with special return instructions -- they will be
3946 handled later during jump, ifcvt, or epilogue creation. */
3947 return_label
= gen_label_rtx ();
3949 /* Initialize rtx used to return the value. */
3950 /* Do this before assign_parms so that we copy the struct value address
3951 before any library calls that assign parms might generate. */
3953 /* Decide whether to return the value in memory or in a register. */
3954 if (aggregate_value_p (DECL_RESULT (subr
), subr
))
3956 /* Returning something that won't go in a register. */
3957 rtx value_address
= 0;
3959 #ifdef PCC_STATIC_STRUCT_RETURN
3960 if (current_function_returns_pcc_struct
)
3962 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
3963 value_address
= assemble_static_space (size
);
3968 rtx sv
= targetm
.calls
.struct_value_rtx (TREE_TYPE (subr
), 1);
3969 /* Expect to be passed the address of a place to store the value.
3970 If it is passed as an argument, assign_parms will take care of
3974 value_address
= gen_reg_rtx (Pmode
);
3975 emit_move_insn (value_address
, sv
);
3980 rtx x
= value_address
;
3981 if (!DECL_BY_REFERENCE (DECL_RESULT (subr
)))
3983 x
= gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr
)), x
);
3984 set_mem_attributes (x
, DECL_RESULT (subr
), 1);
3986 SET_DECL_RTL (DECL_RESULT (subr
), x
);
3989 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
3990 /* If return mode is void, this decl rtl should not be used. */
3991 SET_DECL_RTL (DECL_RESULT (subr
), NULL_RTX
);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local = gen_reg_rtx (Pmode);

      set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      emit_move_insn (local, static_chain_incoming_rtx);
    }
  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
	 before the frame variable gets declared.  Help out...  */
      expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));

      t_save = build4 (ARRAY_REF, ptr_type_node,
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, virtual_stack_vars_rtx);
      update_nonlocal_goto_save_area ();
    }
  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  if (!NOTE_P (get_last_insn ()))
    emit_note (NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (current_function_profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
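/* Illustration (not part of GCC): the static-chain handling above matters
   for GNU C nested functions, where the inner function needs a pointer to
   its parent's frame; the pseudo copied from static_chain_incoming_rtx is
   what survives register allocation.  A minimal example of such input
   code, under `#if 0' since it is sample input rather than compiler
   source:  */
#if 0
/* Nested functions are a GNU C extension.  */
int
outer (int x)
{
  int inner (void) { return x; }	/* reads `x' via the static chain */
  return inner ();
}
#endif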
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  cfun = 0;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = current_function_return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}
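/* A hypothetical caller of diddle_return_value, sketched here only to show
   the callback protocol (DOIT runs once for a REG return value, or once
   per hard register of a PARALLEL).  `count_return_regs' is not a real
   GCC function; the sketch is under `#if 0' so it is never compiled:  */
#if 0
static void
count_one_return_reg (rtx reg ATTRIBUTE_UNUSED, void *arg)
{
  ++*(int *) arg;
}

static int
count_return_regs (void)
{
  int n = 0;
  diddle_return_value (count_one_return_reg, &n);
  return n;
}
#endif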
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}
void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use a pseudo to return the value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}
static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_USE (VOIDmode, reg));
}

void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
      warning (0, "%Junused parameter %qD", decl, decl);
}
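/* Illustration (not part of GCC): source code that satisfies the tests
   above -- a named, user-written PARM_DECL that is never used -- and so
   draws the warning under -Wunused-parameter.  Kept under `#if 0' since
   it is sample input, not compiler source:  */
#if 0
int
f (int a, int b)	/* warning: unused parameter 'b' */
{
  return a;
}
#endif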
static GTY(()) rtx initial_trampoline;
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
    get_arg_pointer_save_area (cfun);
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insn_before (seq, tail_recursion_reentry);
	    break;
	  }
    }
  /* Possibly warn about unused parameters.
     When the frontend does unit-at-a-time, the warning is already
     issued at finalization time.  */
  if (warn_unused_parameter
      && !lang_hooks.callgraph.expand_function)
    do_warn_unused_parameter (current_function_decl);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.
     However, not all machine descriptions define a blockage insn, so
     emit an ASM_INPUT to act as one.  */
  if (flag_non_call_exceptions)
    emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  emit_line_note (input_location);
  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation until after the
     current_function_value_rtx is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  /* Let except.c know where it should emit the call to unregister
     the function context for sjlj exceptions.  */
  if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
    sjlj_emit_function_exit_after (get_last_insn ());
  /* If the scalar return value was computed in a pseudo-reg, or was a
     named return value that got dumped to the stack, copy that to the
     hard return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = current_function_return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that current_function_return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the
	     appropriate amount.  BLKmode results are handled using the
	     group load/store machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

	      if (targetm.calls.promote_function_return
		    (TREE_TYPE (current_function_decl)))
		promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
			      &unsignedp, 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

#ifdef FUNCTION_OUTGOING_VALUE
      outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
					  current_function_decl);
#else
      outgoing = FUNCTION_VALUE (build_pointer_type (type),
				 current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show the return register used to hold the result (in this case,
	 the address of the result).  */
      current_function_return_rtx = outgoing;
    }
  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Emit the actual code to clobber the return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    expand_naked_return ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  emit_label (naked_return_label);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && current_function_calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
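/* Illustration (not part of GCC): one concrete case the copy-back logic
   above has to handle.  For input such as

	short g (void) { return -1; }

   on a target whose ABI promotes return values, decl_rtl is a HImode
   pseudo while current_function_return_rtx is a wider hard register; the
   GET_MODE mismatch branch re-applies the PROMOTE_MODE extension via
   convert_move with the correct signedness before the value reaches the
   caller.  */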
rtx
get_arg_pointer_save_area (struct function *f)
{
  rtx ret = f->x_arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
      f->x_arg_pointer_save_area = ret;
    }

  if (f == cfun && ! f->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();
    }

  return ret;
}
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, VEC(int,heap) **vecp)
{
  rtx tmp;

  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
}
/* Set the locator of the insn chain starting at INSN to LOC.  */

static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
	INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (rtx insn, VEC(int,heap) **vec)
{
  int i, j;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
	      == VEC_index (int, *vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VEC_index (int, *vec, j))
	  return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (rtx insn)
{
  if (contains (insn, &prologue))
    return 1;
  if (contains (insn, &epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, &sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb, rtx line_note)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
  if (line_note)
    emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
}
#endif /* HAVE_return */
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)

/* These functions convert the epilogue into a variant that does not
   modify the stack pointer.  This is used in cases where a function
   returns an object whose size is not known until it is computed.
   The called function leaves the object on the stack, leaves the
   stack depressed, and returns a pointer to the object.

   What we need to do is track all modifications and references to the
   stack pointer, deleting the modifications and changing the
   references to point to the location the stack pointer would have
   pointed to had the modifications taken place.

   These functions need to be portable so we need to make as few
   assumptions about the epilogue as we can.  However, the epilogue
   basically contains three things: instructions to reset the stack
   pointer, instructions to reload registers, possibly including the
   frame pointer, and an instruction to return to the caller.

   We must be sure of what a relevant epilogue insn is doing.  We also
   make no attempt to validate the insns we make since if they are
   invalid, we probably can't do anything valid.  The intent is that
   these routines get "smarter" as more and more machines start to use
   them and they try operating on different epilogues.

   We use the following structure to track what the part of the
   epilogue that we've already processed has done.  We keep two copies
   of the SP equivalence, one for use during the insn we are
   processing and one for use in the next insn.  The difference is
   because one part of a PARALLEL may adjust SP and the other may use
   it.  */

struct epi_info
{
  rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
  rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
				   should be set to once we no longer need
				   its value.  */
  rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
					     for registers.  */
};

static void handle_epilogue_set (rtx, struct epi_info *);
static void update_epilogue_consts (rtx, rtx, void *);
static void emit_equiv_load (struct epi_info *);
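/* Illustration (not part of GCC): for an epilogue fragment such as

	(set (reg fp) (mem (plus (reg sp) (const_int 8))))
	(set (reg sp) (plus (reg sp) (const_int 12)))

   the walk in keep_stack_depressed below leaves SP unmodified and instead
   records sp_equiv_reg = SP, sp_offset = 12 after the second insn; a
   later reference to SP is rewritten to (plus (reg sp) (const_int 12)),
   pointing where SP would have pointed had the adjustment happened.  */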
/* Modify INSN, a list of one or more insns that is part of the epilogue, to
   make no modifications to the stack pointer.  Return the new list of
   insns.  */

static rtx
keep_stack_depressed (rtx insns)
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */
  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;

  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
    info.const_equiv[j] = 0;

  insn = insns;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
	{
	  add_insn (insn);
	  insn = next;
	  continue;
	}
      /* If this insn references the register that SP is equivalent to and
	 we have a pending load to that register, we must force out the load
	 first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
	{
	  emit_equiv_load (&info);
	  info.sp_equiv_reg = 0;
	}

      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;
      /* If this is a (RETURN) and the return address is on the stack,
	 update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	{
	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
	  rtx base = 0;
	  HOST_WIDE_INT offset = 0;
	  rtx jump_insn, jump_set;
	  /* If the return address is in a register, we can emit the insn
	     unchanged.  Otherwise, it must be a MEM and we see what the
	     base register and offset are.  In any case, we have to emit any
	     pending load to the equivalent reg of SP, if any.  */
	  if (REG_P (retaddr))
	    {
	      emit_equiv_load (&info);
	      add_insn (insn);
	    }
	  else
	    {
	      rtx ret_ptr;
	      gcc_assert (MEM_P (retaddr));

	      ret_ptr = XEXP (retaddr, 0);

	      if (REG_P (ret_ptr))
		{
		  base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
		  offset = 0;
		}
	      else
		{
		  gcc_assert (GET_CODE (ret_ptr) == PLUS
			      && REG_P (XEXP (ret_ptr, 0))
			      && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
		  base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
		  offset = INTVAL (XEXP (ret_ptr, 1));
		}

	      /* If the base of the location containing the return pointer
		 is SP, we must update it with the replacement address.
		 Otherwise, just build the necessary MEM.  */
	      retaddr = plus_constant (base, offset);
	      if (base == stack_pointer_rtx)
		retaddr
		  = simplify_replace_rtx (retaddr, stack_pointer_rtx,
					  plus_constant (info.sp_equiv_reg,
							 info.sp_offset));

	      retaddr = gen_rtx_MEM (Pmode, retaddr);

	      /* If there is a pending load to the equivalent register for SP
		 and we reference that register, we must load our address into
		 a scratch register and then do that load.  */
	      if (info.equiv_reg_src
		  && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
		{
		  unsigned int regno;
		  rtx reg;

		  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		    if (HARD_REGNO_MODE_OK (regno, Pmode)
			&& !fixed_regs[regno]
			&& TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
			&& !REGNO_REG_SET_P
			   (EXIT_BLOCK_PTR->il.rtl->global_live_at_start,
			    regno)
			&& !refers_to_regno_p (regno,
					       regno + hard_regno_nregs[regno]
								       [Pmode],
					       info.equiv_reg_src, NULL)
			&& info.const_equiv[regno] == 0)
		      break;

		  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

		  reg = gen_rtx_REG (Pmode, regno);
		  emit_move_insn (reg, retaddr);
		  retaddr = reg;
		}

	      emit_equiv_load (&info);
	      jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

	      /* Show the SET in the above insn is a RETURN.  */
	      jump_set = single_set (jump_insn);
	      gcc_assert (jump_set);
	      SET_IS_RETURN_P (jump_set) = 1;
	    }
	}
      /* If SP is not mentioned in the pattern and its equivalent register, if
	 any, is not modified, just emit it.  Otherwise, if neither is set,
	 replace the reference to SP and emit the insn.  If none of those are
	 true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	add_insn (insn);

      else if (! reg_set_p (stack_pointer_rtx, insn)
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	{
	  int changed;

	  changed = validate_replace_rtx (stack_pointer_rtx,
					  plus_constant (info.sp_equiv_reg,
							 info.sp_offset),
					  insn);
	  gcc_assert (changed);

	  add_insn (insn);
	}
      else if (GET_CODE (PATTERN (insn)) == SET)
	handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	{
	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
	}
      else
	add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      /* Now update any constants this insn sets.  */
      note_stores (PATTERN (insn), update_epilogue_consts, &info);
      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (rtx set, struct epi_info *p)
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from, which we must be able to determine.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      gcc_assert (SET_DEST (set) == stack_pointer_rtx);

      if (GET_CODE (SET_SRC (set)) == PLUS)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	    p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	  else
	    {
	      gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
			  && (REGNO (XEXP (SET_SRC (set), 1))
			      < FIRST_PSEUDO_REGISTER)
			  && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
	      p->new_sp_offset
		= INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
	    }
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));

      return;
    }
  /* Next handle the case where we are setting SP's equivalent
     register.  We must not already have a value to set it to.  We
     could update, but there seems little point in handling that case.
     Note that we have to allow for the case where we are setting the
     register set in the previous part of a PARALLEL inside a single
     insn.  But use the old offset for any updates within this insn.
     We must allow for the case where the register is being set in a
     different (usually wider) mode than Pmode.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      gcc_assert (!p->equiv_reg_src
		  && REG_P (p->new_sp_equiv_reg)
		  && REG_P (SET_DEST (set))
		  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
		      <= BITS_PER_WORD)
		  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
      p->equiv_reg_src
	= simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				plus_constant (p->sp_equiv_reg,
					       p->sp_offset));
    }
  /* Otherwise, replace any references to SP in the insn to its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set),
					     stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}
/* Update the tracking information for registers set to constants.  */

static void
update_epilogue_consts (rtx dest, rtx x, void *data)
{
  struct epi_info *p = (struct epi_info *) data;
  rtx new;
  if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    return;

  /* If we are either clobbering a register or doing a partial set,
     show we don't know the value.  */
  else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
    p->const_equiv[REGNO (dest)] = 0;

  /* If we are setting it to a constant, record that constant.  */
  else if (GET_CODE (SET_SRC (x)) == CONST_INT)
    p->const_equiv[REGNO (dest)] = SET_SRC (x);

  /* If this is a binary operation between a register we have been tracking
     and a constant, see if we can compute a new constant value.  */
  else if (ARITHMETIC_P (SET_SRC (x))
	   && REG_P (XEXP (SET_SRC (x), 0))
	   && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
	   && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
	   && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	   && 0 != (new = simplify_binary_operation
		    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
		     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
		     XEXP (SET_SRC (x), 1)))
	   && GET_CODE (new) == CONST_INT)
    p->const_equiv[REGNO (dest)] = new;

  /* Otherwise, we can't do anything with this value.  */
  else
    p->const_equiv[REGNO (dest)] = 0;
}
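/* Illustration (not part of GCC): given the epilogue stores

	(set (reg r3) (const_int 16))
	(set (reg r3) (plus (reg r3) (const_int 8)))

   the first SET records const_equiv[r3] = (const_int 16) and the second
   folds through simplify_binary_operation to const_equiv[r3] =
   (const_int 24), which lets handle_epilogue_set treat a subsequent
   `sp = sp + r3' as a constant adjustment of 24.  */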
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (struct epi_info *p)
{
  if (p->equiv_reg_src != 0)
    {
      rtx dest = p->sp_equiv_reg;

      if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
	dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
			    REGNO (p->sp_equiv_reg));

      emit_move_insn (dest, p->equiv_reg_src);
      p->equiv_reg_src = 0;
    }
}
#endif /* HAVE_epilogue && INCOMING_RETURN_ADDR_RTX */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* Can't deal with multiple successors of the entry block
	 at the moment.  Function should always have at least one
	 fallthru edge.  */
      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));

      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
      inserted = 1;
    }
#endif
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction, then by
	 definition we don't need a full epilogue.  Examine the block that
	 falls through to EXIT.  If it does not contain any code, examine
	 its predecessors and try to emit (conditional) return
	 instructions.  */

      basic_block last;
      rtx label;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (BB_HEAD (last) == label && LABEL_P (label))
	{
	  edge_iterator ei2;
	  rtx epilogue_line_note = NULL_RTX;
	  /* Locate the line number associated with the closing brace,
	     if we can find one.  */
	  for (seq = get_last_insn ();
	       seq && ! active_insn_p (seq);
	       seq = PREV_INSN (seq))
	    if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
	      {
		epilogue_line_note = seq;
		break;
	      }

	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      if (bb == ENTRY_BLOCK_PTR)
		{
		  ei_next (&ei2);
		  continue;
		}

	      jump = BB_END (bb);
	      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
		{
		  ei_next (&ei2);
		  continue;
		}
	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb, epilogue_line_note);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  if (! redirect_jump (jump, 0, 0))
		    {
		      ei_next (&ei2);
		      continue;
		    }

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (single_succ_p (bb))
		    {
		      ei_next (&ei2);
		      continue;
		    }
		}
	      else
		{
		  ei_next (&ei2);
		  continue;
		}

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }
	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (BB_END (last));
	  emit_return_into_block (last, epilogue_line_note);
	  epilogue_end = BB_END (last);
	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif
  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
	if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
	  cur_bb->rbi->next = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
epilogue_done:

  if (inserted)
    commit_edge_insertions ();
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn))
	{
	  ei_next (&ei);
	  continue;
	}

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif
#ifdef HAVE_prologue
  /* This is probably all useless now that we use locators.  */
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note after the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) we should generate a
	 note before the end of the first basic block, if there isn't
	 one already there.

	 ??? This behavior is completely broken when dealing with
	 multiple entry functions.  We simply place the note always
	 into the first basic block and let alternate entry points
	 be missed.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  */
	      if (prev == NULL)
		break;
	      reorder_insns (insn, insn, prologue_end);
	    }
	}

      /* Find the last line number note in the first block.  */
      for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
	   insn != prologue_end && insn;
	   insn = PREV_INSN (insn))
	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
	  break;

      /* If we didn't find one, make a copy of the first line number
	 we run across.  */
      if (! insn)
	{
	  for (insn = next_active_insn (prologue_end);
	       insn;
	       insn = PREV_INSN (insn))
	    if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
	      {
		emit_note_copy_after (insn, prologue_end);
		break;
	      }
	}
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_LINE_NUMBER (insn) > 0
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VEC_length (int, prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, &prologue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the prologue-end note if we haven't already, and
	     move it to just after the last prologue insn.  */
	  if (note == 0)
	    {
	      for (note = last; (note = NEXT_INSN (note));)
		if (NOTE_P (note)
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		  break;
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }
  if ((len = VEC_length (int, epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if (contains (insn, &epilogue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the epilogue-begin note if we haven't already, and
	     move it to just before the first epilogue insn.  */
	  if (note == 0)
	    {
	      for (note = insn; (note = PREV_INSN (note));)
		if (NOTE_P (note)
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		  break;
	    }

	  if (PREV_INSN (last) != note)
	    reorder_insns (note, note, PREV_INSN (last));
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Resets the insn_block_boundaries array.  */

void
reset_block_changes (void)
{
  VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
}
/* Record the boundary for BLOCK.  */

void
record_block_change (tree block)
{
  int i, n;
  tree last_block;

  if (!block)
    return;

  if (!cfun->ib_boundaries_block)
    return;

  last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
  VARRAY_POP (cfun->ib_boundaries_block);
  n = get_max_uid ();
  for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
    VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);

  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
}
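/* Illustration (not part of GCC): the loop above pads the vector with the
   previous block for every insn UID issued since the last boundary was
   recorded, before pushing the new block.  The vector therefore stays
   directly indexable by INSN_UID, which is what check_block_change below
   relies on.  */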
/* Finishes the record of boundaries.  */

void
finalize_block_changes (void)
{
  record_block_change (DECL_INITIAL (current_function_decl));
}

/* For INSN return the BLOCK it belongs to.  */

void
check_block_change (rtx insn, tree *block)
{
  unsigned uid = INSN_UID (insn);

  if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
    return;

  *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
}

/* Releases the ib_boundaries_block records.  */

void
free_block_changes (void)
{
  cfun->ib_boundaries_block = NULL;
}

/* Returns the name of the current function.  */

const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

#include "gt-function.h"