/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "integrate.h"
#include "langhooks.h"
#include "cfglayout.h"
#include "tree-gimple.h"

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
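
/* Illustrative sketch (not part of the original sources): with a
   power-of-two ALIGN the two macros bracket a value from below and
   above, e.g. assuming ALIGN == 16:

       FLOOR_ROUND (37, 16)   ==  32   (largest multiple of 16 <= 37)
       CEIL_ROUND  (37, 16)   ==  48   (smallest multiple of 16 >= 37)
       FLOOR_ROUND (-20, 16)  == -32   (still correct for a negative value,
                                        where division-based rounding
                                        could misbehave)

   The bit operations work because ~((ALIGN) - 1) is a mask that clears
   the low log2(ALIGN) bits.  */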
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
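
/* Illustrative sketch (not part of the original sources): the nesting
   levels are driven by push_temp_slots/pop_temp_slots around each
   statement, roughly:

       push_temp_slots ();
       ... expand one statement; intermediate values that must live in
           memory come from assign_stack_temp_for_type () ...
       preserve_temp_slots (result_rtx);   only if the result may still
                                           be sitting in one of the temps
       pop_temp_slots ();                  recycles whatever was not
                                           preserved or marked "kept"

   preserve_temp_slots works by moving a slot to the enclosing level,
   so the matching pop_temp_slots leaves it alone.  */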
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void instantiate_decls (tree, int);
static void instantiate_decls_1 (tree, int);
static void instantiate_decl (rtx, HOST_WIDE_INT, int);
static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
static int instantiate_virtual_regs_1 (rtx *, rtx, int);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, varray_type *);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
static int contains (rtx, varray_type);
static void emit_return_into_block (basic_block, rtx);
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void instantiate_virtual_regs_lossage (rtx);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context)
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
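
/* Illustrative sketch (not part of the original sources): callers pick
   ALIGN from the conventions documented above, e.g.

       rtx slot;

       slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
           (alignment taken from the mode)

       slot = assign_stack_local (BLKmode, 32, -1);
           (32-byte block aligned to BIGGEST_ALIGNMENT, with the size
            rounded up to a multiple of that alignment)

       slot = assign_stack_local (BLKmode, len, 128);
           (explicit 128-bit alignment boundary)

   The returned MEM has an address based on virtual_stack_vars_rtx
   until instantiate_virtual_regs has run.  */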
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                        (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                       (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused and will abort.

   TYPE is the type that will be used for the stack slot.  */
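
/* Illustrative sketch (not part of the original sources): a typical
   request for a scratch slot holding a value of tree type TYPE, for a
   type whose size is known at compile time, looks like

       enum machine_mode mode = TYPE_MODE (type);
       rtx slot = assign_stack_temp_for_type (mode,
                                              int_size_in_bytes (type),
                                              0, type);

   KEEP == 0 means the slot is recycled by the free_temp_slots call at
   the end of the statement; passing TYPE lets the MEM get the proper
   alias set and flags (see set_mem_alias_set below).  */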
596 assign_stack_temp_for_type (enum machine_mode mode
, HOST_WIDE_INT size
, int keep
,
600 struct temp_slot
*p
, *best_p
= 0, *selected
= NULL
, **pp
;
603 /* If SIZE is -1 it means that somebody tried to allocate a temporary
604 of a variable size. */
605 gcc_assert (size
!= -1);
607 /* These are now unused. */
608 gcc_assert (keep
<= 1);
611 align
= BIGGEST_ALIGNMENT
;
613 align
= GET_MODE_ALIGNMENT (mode
);
616 type
= lang_hooks
.types
.type_for_mode (mode
, 0);
619 align
= LOCAL_ALIGNMENT (type
, align
);
621 /* Try to find an available, already-allocated temporary of the proper
622 mode which meets the size and alignment requirements. Choose the
623 smallest one with the closest alignment. */
624 for (p
= avail_temp_slots
; p
; p
= p
->next
)
626 if (p
->align
>= align
&& p
->size
>= size
&& GET_MODE (p
->slot
) == mode
627 && objects_must_conflict_p (p
->type
, type
)
628 && (best_p
== 0 || best_p
->size
> p
->size
629 || (best_p
->size
== p
->size
&& best_p
->align
> p
->align
)))
631 if (p
->align
== align
&& p
->size
== size
)
634 cut_slot_from_list (selected
, &avail_temp_slots
);
642 /* Make our best, if any, the one to use. */
646 cut_slot_from_list (selected
, &avail_temp_slots
);
648 /* If there are enough aligned bytes left over, make them into a new
649 temp_slot so that the extra bytes don't get wasted. Do this only
650 for BLKmode slots, so that we can be sure of the alignment. */
651 if (GET_MODE (best_p
->slot
) == BLKmode
)
653 int alignment
= best_p
->align
/ BITS_PER_UNIT
;
654 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
656 if (best_p
->size
- rounded_size
>= alignment
)
658 p
= ggc_alloc (sizeof (struct temp_slot
));
659 p
->in_use
= p
->addr_taken
= 0;
660 p
->size
= best_p
->size
- rounded_size
;
661 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
662 p
->full_size
= best_p
->full_size
- rounded_size
;
663 p
->slot
= gen_rtx_MEM (BLKmode
,
664 plus_constant (XEXP (best_p
->slot
, 0),
666 p
->align
= best_p
->align
;
668 p
->type
= best_p
->type
;
669 insert_slot_to_list (p
, &avail_temp_slots
);
671 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
674 best_p
->size
= rounded_size
;
675 best_p
->full_size
= rounded_size
;
680 /* If we still didn't find one, make a new temporary. */
683 HOST_WIDE_INT frame_offset_old
= frame_offset
;
685 p
= ggc_alloc (sizeof (struct temp_slot
));
687 /* We are passing an explicit alignment request to assign_stack_local.
688 One side effect of that is assign_stack_local will not round SIZE
689 to ensure the frame offset remains suitably aligned.
691 So for requests which depended on the rounding of SIZE, we go ahead
692 and round it now. We also make sure ALIGNMENT is at least
693 BIGGEST_ALIGNMENT. */
694 gcc_assert (mode
!= BLKmode
|| align
== BIGGEST_ALIGNMENT
);
695 p
->slot
= assign_stack_local (mode
,
697 ? CEIL_ROUND (size
, (int) align
/ BITS_PER_UNIT
)
703 /* The following slot size computation is necessary because we don't
704 know the actual size of the temporary slot until assign_stack_local
705 has performed all the frame alignment and size rounding for the
706 requested temporary. Note that extra space added for alignment
707 can be either above or below this stack slot depending on which
708 way the frame grows. We include the extra space if and only if it
709 is above this slot. */
710 #ifdef FRAME_GROWS_DOWNWARD
711 p
->size
= frame_offset_old
- frame_offset
;
716 /* Now define the fields used by combine_temp_slots. */
717 #ifdef FRAME_GROWS_DOWNWARD
718 p
->base_offset
= frame_offset
;
719 p
->full_size
= frame_offset_old
- frame_offset
;
721 p
->base_offset
= frame_offset_old
;
722 p
->full_size
= frame_offset
- frame_offset_old
;
733 p
->level
= temp_slot_level
;
736 pp
= temp_slots_at_level (p
->level
);
737 insert_slot_to_list (p
, pp
);
739 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
740 slot
= gen_rtx_MEM (mode
, XEXP (p
->slot
, 0));
741 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, slot
, stack_slot_list
);
743 /* If we know the alias set for the memory that will be used, use
744 it. If there's no TYPE, then we don't know anything about the
745 alias set for the memory. */
746 set_mem_alias_set (slot
, type
? get_alias_set (type
) : 0);
747 set_mem_align (slot
, align
);
749 /* If a type is specified, set the relevant flags. */
752 MEM_VOLATILE_P (slot
) = TYPE_VOLATILE (type
);
753 MEM_SET_IN_STRUCT_P (slot
, AGGREGATE_TYPE_P (type
));
759 /* Allocate a temporary stack slot and record it for possible later
760 reuse. First three arguments are same as in preceding function. */
763 assign_stack_temp (enum machine_mode mode
, HOST_WIDE_INT size
, int keep
)
765 return assign_stack_temp_for_type (mode
, size
, keep
, NULL_TREE
);
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate space of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
783 enum machine_mode mode
;
788 if (DECL_P (type_or_decl
))
789 decl
= type_or_decl
, type
= TREE_TYPE (decl
);
791 decl
= NULL
, type
= type_or_decl
;
793 mode
= TYPE_MODE (type
);
795 unsignedp
= TYPE_UNSIGNED (type
);
798 if (mode
== BLKmode
|| memory_required
)
800 HOST_WIDE_INT size
= int_size_in_bytes (type
);
      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
809 /* Unfortunately, we don't yet know how to allocate variable-sized
810 temporaries. However, sometimes we have a fixed upper limit on
811 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
812 instead. This is the case for Chill variable-sized strings. */
813 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
814 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
815 && host_integerp (TYPE_ARRAY_MAX_SIZE (type
), 1))
816 size
= tree_low_cst (TYPE_ARRAY_MAX_SIZE (type
), 1);
818 /* If we still haven't been able to get a size, see if the language
819 can compute a maximum size. */
821 && (size_tree
= lang_hooks
.types
.max_size (type
)) != 0
822 && host_integerp (size_tree
, 1))
823 size
= tree_low_cst (size_tree
, 1);
825 /* The size of the temporary may be too large to fit into an integer. */
826 /* ??? Not sure this should happen except for user silliness, so limit
827 this to things that aren't compiler-generated temporaries. The
828 rest of the time we'll abort in assign_stack_temp_for_type. */
829 if (decl
&& size
== -1
830 && TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
)
832 error ("%Jsize of variable %qD is too large", decl
, decl
);
836 tmp
= assign_stack_temp_for_type (mode
, size
, keep
, type
);
842 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
845 return gen_reg_rtx (mode
);
848 /* Combine temporary stack slots which are adjacent on the stack.
850 This allows for better use of already allocated stack space. This is only
851 done for BLKmode slots because we can be sure that we won't have alignment
852 problems in this case. */
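/* Illustrative sketch (not part of the original sources): if two freed
   BLKmode slots are laid out back to back, e.g.

       slot P:  base_offset = 32, full_size = 16
       slot Q:  base_offset = 48, full_size = 24

   then P->base_offset + P->full_size == Q->base_offset, and the loop
   below merges Q into P, leaving a single 40-byte slot available for
   reuse at offset 32.  */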
855 combine_temp_slots (void)
857 struct temp_slot
*p
, *q
, *next
, *next_q
;
860 /* We can't combine slots, because the information about which slot
861 is in which alias set will be lost. */
862 if (flag_strict_aliasing
)
  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
867 if (! flag_expensive_optimizations
)
868 for (p
= avail_temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
869 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
872 for (p
= avail_temp_slots
; p
; p
= next
)
878 if (GET_MODE (p
->slot
) != BLKmode
)
881 for (q
= p
->next
; q
; q
= next_q
)
887 if (GET_MODE (q
->slot
) != BLKmode
)
890 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
892 /* Q comes after P; combine Q into P. */
894 p
->full_size
+= q
->full_size
;
897 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
899 /* P comes after Q; combine P into Q. */
901 q
->full_size
+= p
->full_size
;
906 cut_slot_from_list (q
, &avail_temp_slots
);
909 /* Either delete P or advance past it. */
911 cut_slot_from_list (p
, &avail_temp_slots
);
915 /* Find the temp slot corresponding to the object at address X. */
917 static struct temp_slot
*
918 find_temp_slot_from_address (rtx x
)
924 for (i
= max_slot_level (); i
>= 0; i
--)
925 for (p
= *temp_slots_at_level (i
); p
; p
= p
->next
)
927 if (XEXP (p
->slot
, 0) == x
929 || (GET_CODE (x
) == PLUS
930 && XEXP (x
, 0) == virtual_stack_vars_rtx
931 && GET_CODE (XEXP (x
, 1)) == CONST_INT
932 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
933 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
936 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
937 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
938 if (XEXP (next
, 0) == x
)
  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
944 if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 0))
945 && (p
= find_temp_slot_from_address (XEXP (x
, 0))) != 0)
947 else if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 1))
948 && (p
= find_temp_slot_from_address (XEXP (x
, 1))) != 0)
954 /* Indicate that NEW is an alternate way of referring to the temp slot
955 that previously was known by OLD. */
958 update_temp_slot_address (rtx old
, rtx
new)
962 if (rtx_equal_p (old
, new))
965 p
= find_temp_slot_from_address (old
);
  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS, see if there is a register in common between them;
     if so, try a recursive call on those values.  */
974 if (GET_CODE (old
) != PLUS
)
979 update_temp_slot_address (XEXP (old
, 0), new);
980 update_temp_slot_address (XEXP (old
, 1), new);
983 else if (GET_CODE (new) != PLUS
)
986 if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 0)))
987 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 1));
988 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 0)))
989 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 1));
990 else if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 1)))
991 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 0));
992 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 1)))
993 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 0));
998 /* Otherwise add an alias for the temp's address. */
999 else if (p
->address
== 0)
1003 if (GET_CODE (p
->address
) != EXPR_LIST
)
1004 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
1006 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new, p
->address
);
1010 /* If X could be a reference to a temporary slot, mark the fact that its
1011 address was taken. */
1014 mark_temp_addr_taken (rtx x
)
1016 struct temp_slot
*p
;
1021 /* If X is not in memory or is at a constant address, it cannot be in
1022 a temporary slot. */
1023 if (!MEM_P (x
) || CONSTANT_P (XEXP (x
, 0)))
1026 p
= find_temp_slot_from_address (XEXP (x
, 0));
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
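/* Illustrative sketch (not part of the original sources): in a GNU C
   statement expression such as

       struct big b = ({ struct big t = make_big (); t; });

   the value of the inner statement may live in a stack temporary
   allocated while expanding the grouping.  Preserving it here is what
   keeps free_temp_slots from recycling that temporary before the
   enclosing expression has consumed it.  */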
1041 preserve_temp_slots (rtx x
)
1043 struct temp_slot
*p
= 0, *next
;
  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
1049 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1054 move_slot_to_level (p
, temp_slot_level
- 1);
1060 /* If X is a register that is being used as a pointer, see if we have
1061 a temporary slot we know it points to. To be consistent with
1062 the code below, we really should preserve all non-kept slots
1063 if we can't find a match, but that seems to be much too costly. */
1064 if (REG_P (x
) && REG_POINTER (x
))
1065 p
= find_temp_slot_from_address (x
);
  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
1070 if (p
== 0 && (!MEM_P (x
) || CONSTANT_P (XEXP (x
, 0))))
1072 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1077 move_slot_to_level (p
, temp_slot_level
- 1);
1083 /* First see if we can find a match. */
1085 p
= find_temp_slot_from_address (XEXP (x
, 0));
1089 /* Move everything at our level whose address was taken to our new
1090 level in case we used its address. */
1091 struct temp_slot
*q
;
1093 if (p
->level
== temp_slot_level
)
1095 for (q
= *temp_slots_at_level (temp_slot_level
); q
; q
= next
)
1099 if (p
!= q
&& q
->addr_taken
)
1100 move_slot_to_level (q
, temp_slot_level
- 1);
1103 move_slot_to_level (p
, temp_slot_level
- 1);
1109 /* Otherwise, preserve all non-kept slots at this level. */
1110 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1115 move_slot_to_level (p
, temp_slot_level
- 1);
1119 /* Free all temporaries used so far. This is normally called at the
1120 end of generating code for a statement. */
1123 free_temp_slots (void)
1125 struct temp_slot
*p
, *next
;
1127 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1132 make_slot_available (p
);
1135 combine_temp_slots ();
1138 /* Push deeper into the nesting level for stack temporaries. */
1141 push_temp_slots (void)
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */
1150 pop_temp_slots (void)
1152 struct temp_slot
*p
, *next
;
1154 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1157 make_slot_available (p
);
1160 combine_temp_slots ();
1165 /* Initialize temporary slots. */
1168 init_temp_slots (void)
1170 /* We have not allocated any temporaries yet. */
1171 avail_temp_slots
= 0;
1172 used_temp_slots
= 0;
1173 temp_slot_level
= 0;
1176 /* These routines are responsible for converting virtual register references
1177 to the actual hard register references once RTL generation is complete.
1179 The following four variables are used for communication between the
1180 routines. They contain the offsets of the virtual registers from their
1181 respective hard registers. */
1183 static int in_arg_offset
;
1184 static int var_offset
;
1185 static int dynamic_offset
;
1186 static int out_arg_offset
;
1187 static int cfa_offset
;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif

#endif
/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
1230 /* Pass through the INSNS of function FNDECL and convert virtual register
1231 references to hard register references. */
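/* Illustrative sketch (not part of the original sources): the pass
   rewrites the five virtual registers into (hard register + known
   offset) form using the offsets computed below, conceptually:

       virtual_incoming_args_rtx  ->  arg_pointer_rtx   + in_arg_offset
       virtual_stack_vars_rtx     ->  frame_pointer_rtx + var_offset
       virtual_stack_dynamic_rtx  ->  stack_pointer_rtx + dynamic_offset
       virtual_outgoing_args_rtx  ->  stack_pointer_rtx + out_arg_offset
       virtual_cfa_rtx            ->  arg_pointer_rtx   + cfa_offset

   so, for example, (mem (plus virtual_stack_vars_rtx (const_int 8)))
   becomes (mem (plus frame_pointer_rtx (const_int 8 + var_offset))).  */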
1234 instantiate_virtual_regs (void)
1238 /* Compute the offsets to use for this function. */
1239 in_arg_offset
= FIRST_PARM_OFFSET (current_function_decl
);
1240 var_offset
= STARTING_FRAME_OFFSET
;
1241 dynamic_offset
= STACK_DYNAMIC_OFFSET (current_function_decl
);
1242 out_arg_offset
= STACK_POINTER_OFFSET
;
1243 cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
1245 /* Scan all variables and parameters of this function. For each that is
1246 in memory, instantiate all virtual registers if the result is a valid
1247 address. If not, we do it later. That will handle most uses of virtual
1248 regs on many machines. */
1249 instantiate_decls (current_function_decl
, 1);
1251 /* Initialize recognition, indicating that volatile is OK. */
1254 /* Scan through all the insns, instantiating every virtual register still
1256 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1257 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
1258 || GET_CODE (insn
) == CALL_INSN
)
1260 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
1261 if (INSN_DELETED_P (insn
))
1263 instantiate_virtual_regs_1 (®_NOTES (insn
), NULL_RTX
, 0);
1264 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1265 if (GET_CODE (insn
) == CALL_INSN
)
1266 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn
),
1269 /* Past this point all ASM statements should match. Verify that
1270 to avoid failures later in the compilation process. */
1271 if (asm_noperands (PATTERN (insn
)) >= 0
1272 && ! check_asm_operands (PATTERN (insn
)))
1273 instantiate_virtual_regs_lossage (insn
);
1276 /* Now instantiate the remaining register equivalences for debugging info.
1277 These will not be valid addresses. */
1278 instantiate_decls (current_function_decl
, 0);
1280 /* Indicate that, from now on, assign_stack_local should use
1281 frame_pointer_rtx. */
1282 virtuals_instantiated
= 1;
1285 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1286 all virtual registers in their DECL_RTL's.
1288 If VALID_ONLY, do this only if the resulting address is still valid.
1289 Otherwise, always do it. */
1292 instantiate_decls (tree fndecl
, int valid_only
)
1296 /* Process all parameters of the function. */
1297 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
1299 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
1300 HOST_WIDE_INT size_rtl
;
1302 instantiate_decl (DECL_RTL (decl
), size
, valid_only
);
      /* If the parameter was promoted, then the incoming RTL mode may be
         larger than the declared type size.  We must use the larger of
         the two.  */
1307 size_rtl
= GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl
)));
1308 size
= MAX (size_rtl
, size
);
1309 instantiate_decl (DECL_INCOMING_RTL (decl
), size
, valid_only
);
1312 /* Now process all variables defined in the function or its subblocks. */
1313 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
1316 /* Subroutine of instantiate_decls: Process all decls in the given
1317 BLOCK node and all its subblocks. */
1320 instantiate_decls_1 (tree let
, int valid_only
)
1324 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
1325 if (DECL_RTL_SET_P (t
))
1326 instantiate_decl (DECL_RTL (t
),
1327 int_size_in_bytes (TREE_TYPE (t
)),
1330 /* Process all subblocks. */
1331 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
1332 instantiate_decls_1 (t
, valid_only
);
1335 /* Subroutine of the preceding procedures: Given RTL representing a
1336 decl and the size of the object, do any instantiation required.
1338 If VALID_ONLY is nonzero, it means that the RTL should only be
1339 changed if the new address is valid. */
1342 instantiate_decl (rtx x
, HOST_WIDE_INT size
, int valid_only
)
1344 enum machine_mode mode
;
1350 /* If this is a CONCAT, recurse for the pieces. */
1351 if (GET_CODE (x
) == CONCAT
)
1353 instantiate_decl (XEXP (x
, 0), size
/ 2, valid_only
);
1354 instantiate_decl (XEXP (x
, 1), size
/ 2, valid_only
);
1358 /* If this is not a MEM, no need to do anything. Similarly if the
1359 address is a constant or a register that is not a virtual register. */
1364 if (CONSTANT_P (addr
)
1366 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
1367 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */
1376 addr
= copy_rtx (addr
);
1378 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
1380 if (valid_only
&& size
>= 0)
1382 unsigned HOST_WIDE_INT decl_size
= size
;
      /* Now verify that the resulting address is valid for every integer or
         floating-point mode up to and including SIZE bytes long.  We do this
         since the object might be accessed in any mode and frame addresses
         are shared.  */
1389 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1390 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= decl_size
;
1391 mode
= GET_MODE_WIDER_MODE (mode
))
1392 if (! memory_address_p (mode
, addr
))
1395 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
1396 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= decl_size
;
1397 mode
= GET_MODE_WIDER_MODE (mode
))
1398 if (! memory_address_p (mode
, addr
))
1402 /* Put back the address now that we have updated it and we either know
1403 it is valid or we don't care whether it is valid. */
1408 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1409 is a virtual register, return the equivalent hard register and set the
1410 offset indirectly through the pointer. Otherwise, return 0. */
1413 instantiate_new_reg (rtx x
, HOST_WIDE_INT
*poffset
)
1416 HOST_WIDE_INT offset
;
1418 if (x
== virtual_incoming_args_rtx
)
1419 new = arg_pointer_rtx
, offset
= in_arg_offset
;
1420 else if (x
== virtual_stack_vars_rtx
)
1421 new = frame_pointer_rtx
, offset
= var_offset
;
1422 else if (x
== virtual_stack_dynamic_rtx
)
1423 new = stack_pointer_rtx
, offset
= dynamic_offset
;
1424 else if (x
== virtual_outgoing_args_rtx
)
1425 new = stack_pointer_rtx
, offset
= out_arg_offset
;
1426 else if (x
== virtual_cfa_rtx
)
1427 new = arg_pointer_rtx
, offset
= cfa_offset
;
/* Called when instantiate_virtual_regs has failed to update the instruction.
   Usually this means that a non-matching instruction has been emitted;
   for asm statements, however, the problem may be in the constraints.  */
1440 instantiate_virtual_regs_lossage (rtx insn
)
1442 gcc_assert (asm_noperands (PATTERN (insn
)) >= 0);
1443 error_for_asm (insn
, "impossible constraint in %<asm%>");
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if
   replacement is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */
1461 instantiate_virtual_regs_1 (rtx
*loc
, rtx object
, int extra_insns
)
1466 HOST_WIDE_INT offset
= 0;
1472 /* Re-start here to avoid recursion in common cases. */
1479 /* We may have detected and deleted invalid asm statements. */
1480 if (object
&& INSN_P (object
) && INSN_DELETED_P (object
))
1483 code
= GET_CODE (x
);
1485 /* Check for some special cases. */
1503 /* We are allowed to set the virtual registers. This means that
1504 the actual register should receive the source minus the
1505 appropriate offset. This is used, for example, in the handling
1506 of non-local gotos. */
1507 if ((new = instantiate_new_reg (SET_DEST (x
), &offset
)) != 0)
1509 rtx src
= SET_SRC (x
);
1511 /* We are setting the register, not using it, so the relevant
1512 offset is the negative of the offset to use were we using
1515 instantiate_virtual_regs_1 (&src
, NULL_RTX
, 0);
1517 /* The only valid sources here are PLUS or REG. Just do
1518 the simplest possible thing to handle them. */
1519 if (!REG_P (src
) && GET_CODE (src
) != PLUS
)
1521 instantiate_virtual_regs_lossage (object
);
1527 temp
= force_operand (src
, NULL_RTX
);
1530 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
1534 emit_insn_before (seq
, object
);
1537 if (! validate_change (object
, &SET_SRC (x
), temp
, 0)
1539 instantiate_virtual_regs_lossage (object
);
1544 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
1549 /* Handle special case of virtual register plus constant. */
1550 if (CONSTANT_P (XEXP (x
, 1)))
1552 rtx old
, new_offset
;
1554 /* Check for (plus (plus VIRT foo) (const_int)) first. */
1555 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
1557 if ((new = instantiate_new_reg (XEXP (XEXP (x
, 0), 0), &offset
)))
1559 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
1561 new = gen_rtx_PLUS (Pmode
, new, XEXP (XEXP (x
, 0), 1));
1570 #ifdef POINTERS_EXTEND_UNSIGNED
1571 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1572 we can commute the PLUS and SUBREG because pointers into the
1573 frame are well-behaved. */
1574 else if (GET_CODE (XEXP (x
, 0)) == SUBREG
&& GET_MODE (x
) == ptr_mode
1575 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1577 = instantiate_new_reg (SUBREG_REG (XEXP (x
, 0)),
1579 && validate_change (object
, loc
,
1580 plus_constant (gen_lowpart (ptr_mode
,
1583 + INTVAL (XEXP (x
, 1))),
1587 else if ((new = instantiate_new_reg (XEXP (x
, 0), &offset
)) == 0)
1589 /* We know the second operand is a constant. Unless the
1590 first operand is a REG (which has been already checked),
1591 it needs to be checked. */
1592 if (!REG_P (XEXP (x
, 0)))
1600 new_offset
= plus_constant (XEXP (x
, 1), offset
);
1602 /* If the new constant is zero, try to replace the sum with just
1604 if (new_offset
== const0_rtx
1605 && validate_change (object
, loc
, new, 0))
1608 /* Next try to replace the register and new offset.
1609 There are two changes to validate here and we can't assume that
1610 in the case of old offset equals new just changing the register
1611 will yield a valid insn. In the interests of a little efficiency,
1612 however, we only call validate change once (we don't queue up the
1613 changes and then call apply_change_group). */
1617 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
1618 : (XEXP (x
, 0) = new,
1619 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
1627 /* Otherwise copy the new constant into a register and replace
1628 constant with that register. */
1629 temp
= gen_reg_rtx (Pmode
);
1631 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
1632 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
1635 /* If that didn't work, replace this expression with a
1636 register containing the sum. */
1639 new = gen_rtx_PLUS (Pmode
, new, new_offset
);
1642 temp
= force_operand (new, NULL_RTX
);
1646 emit_insn_before (seq
, object
);
1647 if (! validate_change (object
, loc
, temp
, 0)
1648 && ! validate_replace_rtx (x
, temp
, object
))
1650 instantiate_virtual_regs_lossage (object
);
1659 /* Fall through to generic two-operand expression case. */
1665 case DIV
: case UDIV
:
1666 case MOD
: case UMOD
:
1667 case AND
: case IOR
: case XOR
:
1668 case ROTATERT
: case ROTATE
:
1669 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
1671 case GE
: case GT
: case GEU
: case GTU
:
1672 case LE
: case LT
: case LEU
: case LTU
:
1673 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
1674 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
1679 /* Most cases of MEM that convert to valid addresses have already been
1680 handled by our scan of decls. The only special handling we
1681 need here is to make a copy of the rtx to ensure it isn't being
1682 shared if we have to change it to a pseudo.
1684 If the rtx is a simple reference to an address via a virtual register,
1685 it can potentially be shared. In such cases, first try to make it
1686 a valid address, which can also be shared. Otherwise, copy it and
1689 First check for common cases that need no processing. These are
1690 usually due to instantiation already being done on a previous instance
1694 if (CONSTANT_ADDRESS_P (temp
)
1695 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1696 || temp
== arg_pointer_rtx
1698 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1699 || temp
== hard_frame_pointer_rtx
1701 || temp
== frame_pointer_rtx
)
1704 if (GET_CODE (temp
) == PLUS
1705 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
1706 && (XEXP (temp
, 0) == frame_pointer_rtx
1707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1708 || XEXP (temp
, 0) == hard_frame_pointer_rtx
1710 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1711 || XEXP (temp
, 0) == arg_pointer_rtx
1716 if (temp
== virtual_stack_vars_rtx
1717 || temp
== virtual_incoming_args_rtx
1718 || (GET_CODE (temp
) == PLUS
1719 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
1720 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
1721 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
      /* This MEM may be shared.  If the substitution can be done without
         the need to generate new pseudos, we want to do it in place
         so all copies of the shared rtx benefit.  The call below will
         only make substitutions if the resulting address is still
         valid.

         Note that we cannot pass X as the object in the recursive call
         since the insn being processed may not allow all valid
         addresses.  However, if we were not passed an object, we can
         only modify X without copying it if X will have a valid
         address.

         ??? Also note that this can still lose if OBJECT is an insn that
         has fewer restrictions on an address than some other insn.
         In that case, we will modify the shared address.  This case
         doesn't seem very likely, though.  One case where this could
         happen is in the case of a USE or CLOBBER reference, but we
         take care of that below.  */
1742 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
1743 object
? object
: x
, 0))
1746 /* Otherwise make a copy and process that copy. We copy the entire
1747 RTL expression since it might be a PLUS which could also be
1749 *loc
= x
= copy_rtx (x
);
1752 /* Fall through to generic unary operation case. */
1755 case STRICT_LOW_PART
:
1757 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
1758 case SIGN_EXTEND
: case ZERO_EXTEND
:
1759 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
1760 case FLOAT
: case FIX
:
1761 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
1766 case POPCOUNT
: case PARITY
:
      /* These cases either have just one operand or we know that we need not
         check the rest of the operands.  */
1774 /* If the operand is a MEM, see if the change is a valid MEM. If not,
1775 go ahead and make the invalid one, but do it to a copy. For a REG,
1776 just make the recursive call, since there's no chance of a problem. */
1778 if ((MEM_P (XEXP (x
, 0))
1779 && instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), XEXP (x
, 0),
1781 || (REG_P (XEXP (x
, 0))
1782 && instantiate_virtual_regs_1 (&XEXP (x
, 0), object
, 0)))
1785 XEXP (x
, 0) = copy_rtx (XEXP (x
, 0));
1790 /* Try to replace with a PLUS. If that doesn't work, compute the sum
1791 in front of this insn and substitute the temporary. */
1792 if ((new = instantiate_new_reg (x
, &offset
)) != 0)
1794 temp
= plus_constant (new, offset
);
1795 if (!validate_change (object
, loc
, temp
, 0))
1801 temp
= force_operand (temp
, NULL_RTX
);
1805 emit_insn_before (seq
, object
);
1806 if (! validate_change (object
, loc
, temp
, 0)
1807 && ! validate_replace_rtx (x
, temp
, object
))
1808 instantiate_virtual_regs_lossage (object
);
1818 /* Scan all subexpressions. */
1819 fmt
= GET_RTX_FORMAT (code
);
1820 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
1823 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
1826 else if (*fmt
== 'E')
1827 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1828 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
1835 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1836 This means a type for which function calls must pass an address to the
1837 function or get an address back from the function.
1838 EXP may be a type node or an expression (whose type is tested). */
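/* Illustrative sketch (not part of the original sources): on a target
   following the usual C conventions,

       struct small { int a; };        small enough to come back in a
                                       register, so aggregate_value_p
                                       normally returns 0;
       struct big { int a[64]; };      returned in memory through a
                                       hidden pointer argument, so
                                       aggregate_value_p returns 1.

   The precise split is decided below by the target's return_in_memory
   hook, by TREE_ADDRESSABLE types, and by -fpcc-struct-return.  */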
1841 aggregate_value_p (tree exp
, tree fntype
)
1843 int i
, regno
, nregs
;
1846 tree type
= (TYPE_P (exp
)) ? exp
: TREE_TYPE (exp
);
1849 switch (TREE_CODE (fntype
))
1852 fntype
= get_callee_fndecl (fntype
);
1853 fntype
= fntype
? TREE_TYPE (fntype
) : 0;
1856 fntype
= TREE_TYPE (fntype
);
1861 case IDENTIFIER_NODE
:
1865 /* We don't expect other rtl types here. */
1869 if (TREE_CODE (type
) == VOID_TYPE
)
1871 /* If the front end has decided that this needs to be passed by
1872 reference, do so. */
1873 if ((TREE_CODE (exp
) == PARM_DECL
|| TREE_CODE (exp
) == RESULT_DECL
)
1874 && DECL_BY_REFERENCE (exp
))
1876 if (targetm
.calls
.return_in_memory (type
, fntype
))
1878 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1879 and thus can't be returned in registers. */
1880 if (TREE_ADDRESSABLE (type
))
1882 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
1884 /* Make sure we have suitable call-clobbered regs to return
1885 the value in; if not, we must return it in memory. */
1886 reg
= hard_function_value (type
, 0, 0);
1888 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1893 regno
= REGNO (reg
);
1894 nregs
= hard_regno_nregs
[regno
][TYPE_MODE (type
)];
1895 for (i
= 0; i
< nregs
; i
++)
1896 if (! call_used_regs
[regno
+ i
])
1901 /* Return true if we should assign DECL a pseudo register; false if it
1902 should live on the local stack. */
1905 use_register_for_decl (tree decl
)
1907 /* Honor volatile. */
1908 if (TREE_SIDE_EFFECTS (decl
))
1911 /* Honor addressability. */
1912 if (TREE_ADDRESSABLE (decl
))
1915 /* Only register-like things go in registers. */
1916 if (DECL_MODE (decl
) == BLKmode
)
  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
1923 if (flag_float_store
&& FLOAT_TYPE_P (TREE_TYPE (decl
)))
1926 /* If we're not interested in tracking debugging information for
1927 this decl, then we can certainly put it in a register. */
1928 if (DECL_IGNORED_P (decl
))
1931 return (optimize
|| DECL_REGISTER (decl
));
1934 /* Return true if TYPE should be passed by invisible reference. */
1937 pass_by_reference (CUMULATIVE_ARGS
*ca
, enum machine_mode mode
,
1938 tree type
, bool named_arg
)
1942 /* If this type contains non-trivial constructors, then it is
1943 forbidden for the middle-end to create any new copies. */
1944 if (TREE_ADDRESSABLE (type
))
1947 /* GCC post 3.4 passes *all* variable sized types by reference. */
1948 if (!TYPE_SIZE (type
) || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
1952 return targetm
.calls
.pass_by_reference (ca
, mode
, type
, named_arg
);
1955 /* Return true if TYPE, which is passed by reference, should be callee
1956 copied instead of caller copied. */
1959 reference_callee_copied (CUMULATIVE_ARGS
*ca
, enum machine_mode mode
,
1960 tree type
, bool named_arg
)
1962 if (type
&& TREE_ADDRESSABLE (type
))
1964 return targetm
.calls
.callee_copies (ca
, mode
, type
, named_arg
);
1967 /* Structures to communicate between the subroutines of assign_parms.
1968 The first holds data persistent across all parameters, the second
1969 is cleared out for each parameter. */
1971 struct assign_parm_data_all
1973 CUMULATIVE_ARGS args_so_far
;
1974 struct args_size stack_args_size
;
1975 tree function_result_decl
;
1977 rtx conversion_insns
;
1978 HOST_WIDE_INT pretend_args_size
;
1979 HOST_WIDE_INT extra_pretend_bytes
;
1980 int reg_parm_stack_space
;
1983 struct assign_parm_data_one
1989 enum machine_mode nominal_mode
;
1990 enum machine_mode passed_mode
;
1991 enum machine_mode promoted_mode
;
1992 struct locate_and_pad_arg_data locate
;
1994 BOOL_BITFIELD named_arg
: 1;
1995 BOOL_BITFIELD last_named
: 1;
1996 BOOL_BITFIELD passed_pointer
: 1;
1997 BOOL_BITFIELD on_stack
: 1;
1998 BOOL_BITFIELD loaded_in_reg
: 1;
2001 /* A subroutine of assign_parms. Initialize ALL. */
2004 assign_parms_initialize_all (struct assign_parm_data_all
*all
)
2008 memset (all
, 0, sizeof (*all
));
2010 fntype
= TREE_TYPE (current_function_decl
);
2012 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2013 INIT_CUMULATIVE_INCOMING_ARGS (all
->args_so_far
, fntype
, NULL_RTX
);
2015 INIT_CUMULATIVE_ARGS (all
->args_so_far
, fntype
, NULL_RTX
,
2016 current_function_decl
, -1);
2019 #ifdef REG_PARM_STACK_SPACE
2020 all
->reg_parm_stack_space
= REG_PARM_STACK_SPACE (current_function_decl
);
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */
2029 split_complex_args (tree args
)
2033 /* Before allocating memory, check for the common case of no complex. */
2034 for (p
= args
; p
; p
= TREE_CHAIN (p
))
2036 tree type
= TREE_TYPE (p
);
2037 if (TREE_CODE (type
) == COMPLEX_TYPE
2038 && targetm
.calls
.split_complex_arg (type
))
2044 args
= copy_list (args
);
2046 for (p
= args
; p
; p
= TREE_CHAIN (p
))
2048 tree type
= TREE_TYPE (p
);
2049 if (TREE_CODE (type
) == COMPLEX_TYPE
2050 && targetm
.calls
.split_complex_arg (type
))
2053 tree subtype
= TREE_TYPE (type
);
2054 bool addressable
= TREE_ADDRESSABLE (p
);
2056 /* Rewrite the PARM_DECL's type with its component. */
2057 TREE_TYPE (p
) = subtype
;
2058 DECL_ARG_TYPE (p
) = TREE_TYPE (DECL_ARG_TYPE (p
));
2059 DECL_MODE (p
) = VOIDmode
;
2060 DECL_SIZE (p
) = NULL
;
2061 DECL_SIZE_UNIT (p
) = NULL
;
2062 /* If this arg must go in memory, put it in a pseudo here.
2063 We can't allow it to go in memory as per normal parms,
2064 because the usual place might not have the imag part
2065 adjacent to the real part. */
2066 DECL_ARTIFICIAL (p
) = addressable
;
2067 DECL_IGNORED_P (p
) = addressable
;
2068 TREE_ADDRESSABLE (p
) = 0;
2071 /* Build a second synthetic decl. */
2072 decl
= build_decl (PARM_DECL
, NULL_TREE
, subtype
);
2073 DECL_ARG_TYPE (decl
) = DECL_ARG_TYPE (p
);
2074 DECL_ARTIFICIAL (decl
) = addressable
;
2075 DECL_IGNORED_P (decl
) = addressable
;
2076 layout_decl (decl
, 0);
2078 /* Splice it in; skip the new decl. */
2079 TREE_CHAIN (decl
) = TREE_CHAIN (p
);
2080 TREE_CHAIN (p
) = decl
;
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static tree
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! current_function_returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = fnargs;
      fnargs = decl;
      all->function_result_decl = decl;
    }

  all->orig_fnargs = fnargs;

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;

  memset (data, 0, sizeof (*data));

  /* Set LAST_NAMED if this is last named arg before last anonymous args.  */
  if (current_function_stdarg)
    {
      tree tem;
      for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
        if (DECL_NAME (tem))
          break;
      if (tem == 0)
        data->last_named = true;
    }

  /* Set NAMED_ARG if this arg should be treated as a named arg.  For
     most machines, if this is a varargs/stdarg function, then we treat
     the last named arg as if it were anonymous too.  */
  if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;
  else
    data->named_arg = !data->last_named;

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union, use the type of
     the first field for the tests below.  We have already verified that
     the modes are the same.  */
  if (DECL_TRANSPARENT_UNION (parm)
      || (TREE_CODE (passed_type) == UNION_TYPE
          && TYPE_TRANSPARENT_UNION (passed_type)))
    passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  promoted_mode = passed_mode;
  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
    {
      int unsignedp = TYPE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode,
                                    &unsignedp, 1);
    }

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

#ifdef FUNCTION_INCOMING_ARG
  entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
                                      data->passed_type, data->named_arg);
#else
  entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
                             data->passed_type, data->named_arg);
#endif

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
        {
          rtx tem;
#ifdef FUNCTION_INCOMING_ARG
          tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
                                       data->passed_type, true);
#else
          tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
                              data->passed_type, true);
#endif
          in_regs = tem != NULL;
        }
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
                                        data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
                                                 data->promoted_mode,
                                                 data->passed_type,
                                                 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
         register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
        {
          /* Part of this argument is passed in registers and part
             is passed on the stack.  Ask the prologue code to extend
             the stack part so that we can recreate the full value.

             PRETEND_BYTES is the size of the registers we need to store.
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
             stack space that the prologue should allocate.

             Internally, gcc assumes that the argument pointer is aligned
             to STACK_BOUNDARY bits.  This is used both for alignment
             optimizations (see init_emit) and to locate arguments that are
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
             a stack boundary.  */

          /* We assume at most one partial arg, and it must be the first
             argument on the stack.  */
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

          pretend_bytes = partial;
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

          /* We want to align relative to the actual stack pointer, so
             don't include this in the stack size until later.  */
          all->extra_pretend_bytes = all->pretend_args_size;
        }
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
                       entry_parm ? data->partial : 0, current_function_decl,
                       &all->stack_args_size, &data->locate);

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
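/* Worked example (illustrative, not from the original sources): with
   UNITS_PER_WORD == 4 and STACK_BOUNDARY == 64 (so STACK_BYTES == 8),
   an argument whose first 12 bytes arrive in registers has partial == 12,
   hence pretend_bytes == 12 and
   all->pretend_args_size == CEIL_ROUND (12, 8) == 16; the extra 4 bytes
   keep the argument pointer aligned to the stack boundary, preserving the
   invariant described above.  */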
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = current_function_internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  set_mem_attributes (stack_parm, parm, 1);

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (GET_CODE (offset_rtx) == CONST_INT)
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
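/* Worked example of the alignment guess above (illustrative): suppose the
   slot sits at a constant offset of 4 bytes and boundary == 64.  Then
   align = 4 * BITS_PER_UNIT | 64 = 32 | 64 = 96, and align & -align
   extracts the lowest set bit, 32 -- the largest alignment that the
   offset alone can guarantee.  */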
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
         locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        emit_group_store (validize_mem (stack_parm), entry_parm,
                          data->passed_type,
                          int_size_in_bytes (data->passed_type));
      else
        {
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
          move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
                               data->partial / UNITS_PER_WORD);
        }

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
           && data->nominal_mode != BLKmode
           && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                == data->passed_mode)
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
          {
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
            break;
          }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
          || (data->nominal_type
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
           && data->nominal_mode != BLKmode
           && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_CODE (data->entry_parm) == PARALLEL)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
                         tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;
  rtx orig_entry_parm = entry_parm;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we've a non-block object that's nevertheless passed in parts,
     reconstitute it in register operations rather than on the stack.  */
  if (GET_CODE (entry_parm) == PARALLEL
      && data->nominal_mode != BLKmode)
    {
      rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);

      if ((XVECLEN (entry_parm, 0) > 1
           || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
          && use_register_for_decl (parm))
        {
          rtx parmreg = gen_reg_rtx (data->nominal_mode);

          push_to_sequence (all->conversion_insns);

          /* For values returned in multiple registers, handle possible
             incompatible calls to emit_group_store.

             For example, the following would be invalid, and would have to
             be fixed by the conditional below:

             emit_group_store ((reg:SF), (parallel:DF))
             emit_group_store ((reg:SI), (parallel:DI))

             An example of this are doubles in e500 v2:
             (parallel:DF (expr_list (reg:SI) (const_int 0))
             (expr_list (reg:SI) (const_int 4))).  */
          if (data->nominal_mode != data->passed_mode)
            {
              rtx t = gen_reg_rtx (GET_MODE (entry_parm));
              emit_group_store (t, entry_parm, NULL_TREE,
                                GET_MODE_SIZE (GET_MODE (entry_parm)));
              convert_move (parmreg, t, 0);
            }
          else
            emit_group_store (parmreg, entry_parm, data->nominal_type,
                              int_size_in_bytes (data->nominal_type));

          all->conversion_insns = get_insns ();
          end_sequence ();

          SET_DECL_RTL (parm, parmreg);
          return;
        }
    }

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
                                       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
        PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
         So we have to be careful to ensure that we allocate an
         integral number of words.  We do this above when we call
         assign_stack_local if space was not allocated in the argument
         list.  If it was, this will not work if PARM_BOUNDARY is not
         a multiple of BITS_PER_WORD.  It isn't clear how to fix this
         if it becomes a problem.  Exception is when BLKmode arrives
         with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
        ;
      else if (GET_CODE (entry_parm) == PARALLEL)
        ;
      else
        gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        {
          push_to_sequence (all->conversion_insns);
          emit_group_store (mem, entry_parm, data->passed_type, size);
          all->conversion_insns = get_insns ();
          end_sequence ();
        }

      else if (size == 0)
        ;

      /* If SIZE is that of a mode no bigger than a word, just use
         that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
        {
          enum machine_mode mode
            = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

          if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
              && (size == UNITS_PER_WORD
                  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
              )
            {
              rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
              emit_move_insn (change_address (mem, mode, 0), reg);
            }

          /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
             machine must be aligned to the left before storing
             to memory.  Note that the previous test doesn't
             handle all cases (e.g. SIZE == 3).  */
          else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                       == downward)
#else
                   && BYTES_BIG_ENDIAN
#endif
                   )
            {
              rtx tem, x;
              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
              rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

              x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                build_int_cst (NULL_TREE, by),
                                NULL_RTX, 1);
              tem = change_address (mem, word_mode, 0);
              emit_move_insn (tem, x);
            }
          else
            move_block_from_reg (REGNO (entry_parm), mem,
                                 size_stored / UNITS_PER_WORD);
        }
      else
        move_block_from_reg (REGNO (entry_parm), mem,
                             size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence (all->conversion_insns);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
                       BLOCK_OP_NORMAL);
      all->conversion_insns = get_insns ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
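/* Worked example of the left-alignment above (illustrative): on a
   BYTES_BIG_ENDIAN target with UNITS_PER_WORD == 4, a 3-byte block
   arriving right-justified in a register is shifted left by
   (4 - 3) * BITS_PER_UNIT == 8 bits before the word_mode store, so its
   three significant bytes land at the low addresses of the stack slot,
   where big-endian BLKmode data is expected to live.  */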
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
                       struct assign_parm_data_one *data)
{
  rtx parmreg;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  */

  promoted_nominal_mode
    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  /* Copy the value into the register.  */
  if (data->nominal_mode != data->passed_mode
      || promoted_nominal_mode != data->promoted_mode)
    {
      int save_tree_used;

      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
         mode, by the caller.  We now have to convert it to
         NOMINAL_MODE, if different.  However, PARMREG may be in
         a different mode than NOMINAL_MODE if it is being stored
         promoted.

         If ENTRY_PARM is a hard register, it might be in a register
         not valid for operating in its mode (e.g., an odd-numbered
         register for a DFmode).  In that case, moves are the only
         thing valid, so we can't do a convert from there.  This
         occurs when the calling sequence allow such misaligned
         usages.

         In addition, the conversion may involve a call, which could
         clobber parameters which haven't been copied to pseudo
         registers yet.  Therefore, we must first copy the parm to
         a pseudo reg here, and save the conversion until after all
         parameters have been moved.  */

      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence (all->conversion_insns);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
          && GET_MODE (tempreg) == data->nominal_mode
          && REG_P (SUBREG_REG (tempreg))
          && data->nominal_mode == data->passed_mode
          && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
          && GET_MODE_SIZE (GET_MODE (tempreg))
             < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
        {
          /* The argument is already sign/zero extended, so note it
             into the subreg.  */
          SUBREG_PROMOTED_VAR_P (tempreg) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
        }

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg));
      TREE_USED (parm) = save_tree_used;
      all->conversion_insns = get_insns ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validize_mem (data->entry_parm));

  /* If we were passed a pointer but the actual value can safely live
     in a register, put it in one.  */
  if (data->passed_pointer
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
      /* If by-reference argument was promoted, demote it.  */
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
          || use_register_for_decl (parm)))
    {
      /* We can't use nominal_mode, because it will have been set to
         Pmode above.  We must use the actual mode of the parm.  */
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
      mark_user_reg (parmreg);

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
        {
          rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
          int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

          push_to_sequence (all->conversion_insns);
          emit_move_insn (tempreg, DECL_RTL (parm));
          tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
          emit_move_insn (parmreg, tempreg);
          all->conversion_insns = get_insns ();
          end_sequence ();

          did_conversion = true;
        }
      else
        emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
         now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
                          XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
        {
          enum machine_mode submode
            = GET_MODE_INNER (GET_MODE (parmreg));
          int regnor = REGNO (XEXP (parmreg, 0));
          int regnoi = REGNO (XEXP (parmreg, 1));
          rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
          rtx stacki = adjust_address_nv (data->stack_parm, submode,
                                          GET_MODE_SIZE (submode));

          /* Scan backwards for the set of the real and
             imaginary parts.  */
          for (sinsn = linsn; sinsn != 0;
               sinsn = prev_nonnote_insn (sinsn))
            {
              set = single_set (sinsn);
              if (set == 0)
                continue;

              if (SET_DEST (set) == regno_reg_rtx[regnoi])
                REG_NOTES (sinsn)
                  = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
                                       REG_NOTES (sinsn));
              else if (SET_DEST (set) == regno_reg_rtx[regnor])
                REG_NOTES (sinsn)
                  = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
                                       REG_NOTES (sinsn));
            }
        }
      else if ((set = single_set (linsn)) != 0
               && SET_DEST (set) == parmreg)
        REG_NOTES (linsn)
          = gen_rtx_EXPR_LIST (REG_EQUIV,
                               data->stack_parm, REG_NOTES (linsn));
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
                      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
                         struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence (all->conversion_insns);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
                                          TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
        /* ??? This may need a big-endian conversion on sparc64.  */
        data->stack_parm
          = adjust_address (data->stack_parm, data->nominal_mode, 0);
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
        {
          data->stack_parm
            = assign_stack_local (GET_MODE (data->entry_parm),
                                  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
                                  TYPE_ALIGN (data->passed_type));
          set_mem_attributes (data->stack_parm, parm, 1);
        }

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
        {
          /* Use a block move to handle potentially misaligned entry_parm.  */
          if (!to_conversion)
            push_to_sequence (all->conversion_insns);
          to_conversion = true;

          emit_block_move (dest, src,
                           GEN_INT (int_size_in_bytes (data->passed_type)),
                           BLOCK_OP_NORMAL);
        }
      else
        emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->conversion_insns = get_insns ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
        {
          rtx tmp, real, imag;
          enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

          real = DECL_RTL (fnargs);
          imag = DECL_RTL (TREE_CHAIN (fnargs));
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }

          if (TREE_ADDRESSABLE (parm))
            {
              rtx rmem, imem;
              HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));

              /* split_complex_arg put the real and imag parts in
                 pseudos.  Move them to memory.  */
              tmp = assign_stack_local (DECL_MODE (parm), size,
                                        TYPE_ALIGN (TREE_TYPE (parm)));
              set_mem_attributes (tmp, parm, 1);
              rmem = adjust_address_nv (tmp, inner, 0);
              imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
              push_to_sequence (all->conversion_insns);
              emit_move_insn (rmem, real);
              emit_move_insn (imem, imag);
              all->conversion_insns = get_insns ();
              end_sequence ();
            }
          else
            {
              tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
              SET_DECL_RTL (parm, tmp);
            }

          real = DECL_INCOMING_RTL (fnargs);
          imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }
          tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_decl_incoming_rtl (parm, tmp);
          fnargs = TREE_CHAIN (fnargs);
        }
      else
        {
          SET_DECL_RTL (parm, DECL_RTL (fnargs));
          set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));

          /* Set MEM_EXPR to the original decl, i.e. to PARM,
             instead of the copy of decl, i.e. FNARGS.  */
          if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
            set_mem_expr (DECL_INCOMING_RTL (parm), parm);
        }

      fnargs = TREE_CHAIN (fnargs);
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree fnargs, parm;
  rtx internal_arg_pointer;
  int varargs_setup = 0;

  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
             || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
        {
          SET_DECL_RTL (parm, const0_rtx);
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          continue;
        }

      /* Handle stdargs.  LAST_NAMED is a slight mis-nomer; it's also true
         for the unnamed dummy argument following the last named argument.
         See ABI silliness wrt strict_argument_naming and NAMED_ARG.  So
         we only want to do this when we get to the actual last named
         argument, which will be the first time LAST_NAMED gets set.  */
      if (data.last_named && !varargs_setup)
        {
          varargs_setup = true;
          assign_parms_setup_varargs (&all, &data, false);
        }

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
        {
          assign_parm_find_stack_rtl (parm, &data);
          assign_parm_adjust_entry_rtl (&data);
        }

      /* Record permanently how this parm was passed.  */
      set_decl_incoming_rtl (parm, data.entry_parm);

      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
                            data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
        assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
        assign_parm_setup_reg (&all, parm, &data);
      else
        assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
    assign_parms_unsplit_complex (&all, fnargs);

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.conversion_insns);

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
        x = addr;
      else
        {
          addr = convert_memory_address (Pmode, addr);
          x = gen_rtx_MEM (DECL_MODE (result), addr);
          set_mem_attributes (x, result, 1);
        }
      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  current_function_pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  current_function_args_size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
                                    REG_PARM_STACK_SPACE (fndecl));
#endif

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;

#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
                                   size_int (-all.stack_args_size.constant)),
                      NULL_RTX, VOIDmode, 0));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
                                                 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  current_function_args_info = all.args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl;

#ifdef FUNCTION_OUTGOING_VALUE
          real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
                                                   fndecl);
#else
          real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
                                          fndecl);
#endif
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
          /* The delay slot scheduler assumes that current_function_return_rtx
             holds the hard register containing the return value, not a
             temporary pseudo.  */
          current_function_return_rtx = real_decl_rtl;
        }
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
        *walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
               && !TYPE_SIZES_GIMPLIFIED (t))
        {
          gimplify_type_sizes (t, (tree *) data);
          *walk_subtrees = 1;
        }
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a list of
   statements to add to the beginning of the function, or NULL if nothing
   to do.  */

tree
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree fnargs, parm, stmts = NULL;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
        continue;

      /* Update info on where next arg arrives in registers.  */
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
                            data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
         SAVE_EXPRs (amongst others) onto a pending sizes list.  This
         turned out to be less than manageable in the gimple world.
         Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
                                    gimplify_parm_type, &stmts);

      if (!TREE_CONSTANT (DECL_SIZE (parm)))
        {
          gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
        }

      if (data.passed_pointer)
        {
          tree type = TREE_TYPE (data.passed_type);
          if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
                                       type, data.named_arg))
            {
              tree local, t;

              /* For constant sized objects, this is trivial; for
                 variable-sized objects, we have to play games.  */
              if (TREE_CONSTANT (DECL_SIZE (parm)))
                {
                  local = create_tmp_var (type, get_name (parm));
                  DECL_IGNORED_P (local) = 0;
                }
              else
                {
                  tree ptr_type, addr, args;

                  ptr_type = build_pointer_type (type);
                  addr = create_tmp_var (ptr_type, get_name (parm));
                  DECL_IGNORED_P (addr) = 0;
                  local = build_fold_indirect_ref (addr);

                  args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
                  t = built_in_decls[BUILT_IN_ALLOCA];
                  t = build_function_call_expr (t, args);
                  t = fold_convert (ptr_type, t);
                  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
                  gimplify_and_add (t, &stmts);
                }

              t = build2 (MODIFY_EXPR, void_type_node, local, parm);
              gimplify_and_add (t, &stmts);

              DECL_VALUE_EXPR (parm) = local;
            }
        }
    }

  return stmts;
}
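/* Sketch (illustrative only) of what the callee-copies path above emits
   for a variable-sized reference parameter PARM of type TYPE:

     addr  = temporary of type TYPE *
     addr  = (TYPE *) __builtin_alloca (DECL_SIZE_UNIT (PARM))
     local = *addr
     local = PARM                          <- the gimplified MODIFY_EXPR
     DECL_VALUE_EXPR (PARM) == local       <- later uses of PARM see the copy

   For a constant-sized PARM no alloca is needed and LOCAL is simply an
   ordinary temporary variable of type TYPE.  */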
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

rtx
promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (REG_P (DECL_INCOMING_RTL (arg))
        && REGNO (DECL_INCOMING_RTL (arg)) == regno
        && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
        int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));

        mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
        if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
            && mode != DECL_MODE (arg))
          {
            *pmode = DECL_MODE (arg);
            *punsignedp = unsignedp;
            return DECL_INCOMING_RTL (arg);
          }
      }

  return 0;
}
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specific boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
                     int partial, tree fndecl ATTRIBUTE_UNUSED,
                     struct args_size *initial_offset_ptr,
                     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  int boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              ssize_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
  locate->where_pad = where_pad;
  locate->boundary = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
                                          initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
        && (!host_integerp (sizetree, 1)
            || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
     )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
                          &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
                           - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
                                   size_binop (MINUS_EXPR,
                                               ssize_int (0),
                                               initial_offset_ptr->var),
                                   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
     )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
                          &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
          || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */
}
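/* Worked example of the two roundings (illustrative): take a 6-byte
   BLKmode argument with PARM_BOUNDARY == 32 and FUNCTION_ARG_BOUNDARY
   == 64, args growing upward.  The first rounding aligns the starting
   offset up to a multiple of 8 bytes (64 bits) via pad_to_arg_alignment;
   the second rounds the recorded size from 6 bytes up to 8 bytes
   (48 bits rounded up to the next multiple of 32 bits).  Only the offset
   moves with the first rounding; only the size is changed by the
   second.  */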
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
                      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* The sparc port has a bug.  It sometimes claims a STACK_BOUNDARY
     higher than the real alignment of %sp.  However, when it does this,
     the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
     This is a temporary hack while the sparc port is fixed.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          tree sp_offset_tree = ssize_int (sp_offset);
          tree offset = size_binop (PLUS_EXPR,
                                    ARGS_SIZE_TREE (*offset_ptr),
                                    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
          tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
          tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

          offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
          /* ARGS_SIZE_TREE includes constant term.  */
          offset_ptr->constant = 0;
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
                                             save_var);
        }
      else
        {
          offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
            FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
            CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
            alignment_pad->constant = offset_ptr->constant - save_constant;
        }
    }
}
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          /* Add it in.  */
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
/* Walk the tree of blocks describing the binding levels within a function
   and warn about variables that might be killed by setjmp or vfork.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
setjmp_vars_warning (tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
          && DECL_RTL_SET_P (decl)
          && REG_P (DECL_RTL (decl))
          && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
        warning ("%Jvariable %qD might be clobbered by %<longjmp%>"
                 " or %<vfork%>",
                 decl, decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_vars_warning (sub);
}

/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning (void)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning ("%Jargument %qD might be clobbered by %<longjmp%> or %<vfork%>",
               decl, decl);
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  varray_type block_stack;

  if (block == NULL_TREE)
    return;

  VARRAY_TREE_INIT (block_stack, 10, "block_stack");

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* Remove deleted blocks from the block fragment chains.  */
  reorder_fix_fragments (block);
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
        {
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
            {
              tree block = NOTE_BLOCK (insn);

              /* If we have seen this block before, that means it now
                 spans multiple address regions.  Create a new fragment.  */
              if (TREE_ASM_WRITTEN (block))
                {
                  tree new_block = copy_node (block);
                  tree origin;

                  origin = (BLOCK_FRAGMENT_ORIGIN (block)
                            ? BLOCK_FRAGMENT_ORIGIN (block)
                            : block);
                  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
                  BLOCK_FRAGMENT_CHAIN (new_block)
                    = BLOCK_FRAGMENT_CHAIN (origin);
                  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

                  NOTE_BLOCK (insn) = new_block;
                  block = new_block;
                }

              BLOCK_SUBBLOCKS (block) = 0;
              TREE_ASM_WRITTEN (block) = 1;
              /* When there's only one block for the entire function,
                 current_block == block and we mustn't do this, it
                 will cause infinite recursion.  */
              if (block != current_block)
                {
                  BLOCK_SUPERCONTEXT (block) = current_block;
                  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
                  BLOCK_SUBBLOCKS (current_block) = block;
                  current_block = block;
                }
              VARRAY_PUSH_TREE (*p_block_stack, block);
            }
          else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
            {
              NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
              VARRAY_POP (*p_block_stack);
              BLOCK_SUBBLOCKS (current_block)
                = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
              current_block = BLOCK_SUPERCONTEXT (current_block);
            }
        }
    }
}
/* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
   appears in the block tree, select one of the fragments to become
   the new origin block.  */

static void
reorder_fix_fragments (tree block)
{
  while (block)
    {
      tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
      tree new_origin = NULL_TREE;

      if (dup_origin)
        {
          if (! TREE_ASM_WRITTEN (dup_origin))
            {
              new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);

              /* Find the first of the remaining fragments.  There must
                 be at least one -- the current block.  */
              while (! TREE_ASM_WRITTEN (new_origin))
                new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
              BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
            }
        }
      else if (! dup_origin)
        new_origin = block;

      /* Re-root the rest of the fragments to the new origin.  In the
         case that DUP_ORIGIN was null, that means BLOCK was the origin
         of a chain of fragments and we want to remove those fragments
         that didn't make it to the output.  */
      if (new_origin)
        {
          tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
          tree chain = BLOCK_FRAGMENT_CHAIN (block);

          while (chain)
            {
              if (TREE_ASM_WRITTEN (chain))
                {
                  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
                  *pp = chain;
                  pp = &BLOCK_FRAGMENT_CHAIN (chain);
                }
              chain = BLOCK_FRAGMENT_CHAIN (chain);
            }
          *pp = NULL_TREE;
        }

      reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = xmalloc (*n_blocks_p * sizeof (tree));
  all_blocks (block, block_vector);

  return block_vector;
}

static GTY(()) int next_block_index = 2;
/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  */

void
allocate_struct_function (tree fndecl)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared (sizeof (struct function));

  cfun->stack_alignment_needed = STACK_BOUNDARY;
  cfun->preferred_stack_boundary = STACK_BOUNDARY;

  current_function_funcdef_no = funcdef_no++;

  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

  init_eh_for_function ();

  lang_hooks.function.init (cfun);
  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

  if (fndecl == NULL)
    return;

  DECL_STRUCT_FUNCTION (fndecl) = cfun;
  cfun->decl = fndecl;

  result = DECL_RESULT (fndecl);
  if (aggregate_value_p (result, fndecl))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));

  current_function_stdarg
    = (fntype
       && TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
           != void_type_node));
}
/* Reset cfun, and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (tree fndecl)
{
  if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
    cfun = DECL_STRUCT_FUNCTION (fndecl);
  else
    allocate_struct_function (fndecl);
  init_emit ();
  init_varasm_status (cfun);
  init_expr ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */

void
init_dummy_function_start (void)
{
  prepare_function_start (NULL);
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  prepare_function_start (subr);

  /* Prevent ever trying to delete the first instruction of a
     function.  Also tell final how to output a linenum before the
     function prologue.  Note linenums could be missing, e.g. when
     compiling a Java .class file.  */
  if (! DECL_IS_BUILTIN (subr))
    emit_line_note (DECL_SOURCE_LOCATION (subr));

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");
}
/* Make sure all values used by the optimization passes have sane
   defaults.  */

void
init_function_for_compilation (void)
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  */
  VARRAY_GROW (prologue, 0);
  VARRAY_GROW (epilogue, 0);
  VARRAY_GROW (sibcall_epilogue, 0);
}
/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function (void)
{
#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
  if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      rtx tmp, seq;

      start_sequence ();
      /* Forcibly align the stack.  */
#ifdef STACK_GROWS_DOWNWARD
      tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
				 stack_pointer_rtx, 1, OPTAB_WIDEN);
#else
      tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
				 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
      tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
				 stack_pointer_rtx, 1, OPTAB_WIDEN);
#endif
      if (tmp != stack_pointer_rtx)
	emit_move_insn (stack_pointer_rtx, tmp);

      /* Enlist allocate_dynamic_stack_space to pick up the pieces.  */
      tmp = force_reg (Pmode, const0_rtx);
      allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
      seq = get_insns ();
      end_sequence ();

      for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
	if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
	  break;
      if (tmp)
	emit_insn_before (seq, tmp);
      else
	emit_insn (seq);
    }
#endif

#ifndef HAS_INIT_SECTION
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
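/* For illustration only (not part of the original source): with a
   downward-growing stack and a PREFERRED_STACK_BOUNDARY of 128 bits,
   align is 16 bytes and the AND above rounds the stack pointer down
   to a multiple of 16, e.g.

     sp = 0x7fff1237;   sp & -16  ==  sp & 0xfffffff0  ==  0x7fff1230

   For an upward-growing stack the PLUS/AND pair rounds up instead:

     (0x7fff1237 + 15) & -16  ==  0x7fff1240  */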
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  current_function_profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
	    {
	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
	      set_mem_attributes (x, DECL_RESULT (subr), 1);
	    }
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local = gen_reg_rtx (Pmode);

      set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      emit_move_insn (local, static_chain_incoming_rtx);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
	 before the frame variable gets declared.  Help out...  */
      expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));

      t_save = build4 (ARRAY_REF, ptr_type_node,
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, virtual_stack_vars_rtx);
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  if (!NOTE_P (get_last_insn ()))
    emit_note (NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (current_function_profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  cfun = 0;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = current_function_return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_USE (VOIDmode, reg));
}

void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
      warning ("%Junused parameter %qD", decl, decl);
}

static GTY(()) rtx initial_trampoline;
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
    get_arg_pointer_save_area (cfun);

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insn_before (seq, tail_recursion_reentry);
	    break;
	  }
    }

  /* Possibly warn about unused parameters.
     When the front end does unit-at-a-time, the warning is already
     issued at finalization time.  */
  if (warn_unused_parameter
      && !lang_hooks.callgraph.expand_function)
    do_warn_unused_parameter (current_function_decl);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.
     However, not all machine descriptions define a blockage insn, so
     emit an ASM_INPUT to act as one.  */
  if (flag_non_call_exceptions)
    emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  emit_line_note (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  /* Let except.c know where it should emit the call to unregister
     the function context for sjlj exceptions.  */
  if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
    sjlj_emit_function_exit_after (get_last_insn ());

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = current_function_return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that current_function_return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

	      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
		promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
			      &unsignedp, 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

#ifdef FUNCTION_OUTGOING_VALUE
      outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
					  current_function_decl);
#else
      outgoing = FUNCTION_VALUE (build_pointer_type (type),
				 current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      current_function_return_rtx = outgoing;
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    expand_naked_return ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  emit_label (naked_return_label);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && current_function_calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (struct function *f)
{
  rtx ret = f->x_arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
      f->x_arg_pointer_save_area = ret;
    }

  if (f == cfun && ! f->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();
    }

  return ret;
}
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, varray_type *vecp)
{
  int i, len;
  rtx tmp;

  tmp = insns;
  len = 0;
  while (tmp != NULL_RTX)
    {
      len++;
      tmp = NEXT_INSN (tmp);
    }

  i = VARRAY_SIZE (*vecp);
  VARRAY_GROW (*vecp, i + len);
  tmp = insns;
  while (tmp != NULL_RTX)
    {
      VARRAY_INT (*vecp, i) = INSN_UID (tmp);
      i++;
      tmp = NEXT_INSN (tmp);
    }
}
/* Set the locator of the insn chain starting at INSN to LOC.  */

static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
	INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (rtx insn, varray_type vec)
{
  int i, j;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VARRAY_INT (vec, j))
	  return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (rtx insn)
{
  if (contains (insn, prologue))
    return 1;
  if (contains (insn, epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb, rtx line_note)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
  if (line_note)
    emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
}
#endif /* HAVE_return */
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)

/* These functions convert the epilogue into a variant that does not modify the
   stack pointer.  This is used in cases where a function returns an object
   whose size is not known until it is computed.  The called function leaves the
   object on the stack, leaves the stack depressed, and returns a pointer to
   the object.

   What we need to do is track all modifications and references to the stack
   pointer, deleting the modifications and changing the references to point to
   the location the stack pointer would have pointed to had the modifications
   not occurred.

   These functions need to be portable so we need to make as few assumptions
   about the epilogue as we can.  However, the epilogue basically contains
   three things: instructions to reset the stack pointer, instructions to
   reload registers, possibly including the frame pointer, and an
   instruction to return to the caller.

   If we can't be sure of what a relevant epilogue insn is doing, we abort.
   We also make no attempt to validate the insns we make since if they are
   invalid, we probably can't do anything valid.  The intent is that these
   routines get "smarter" as more and more machines start to use them and
   they try operating on different epilogues.

   We use the following structure to track what the part of the epilogue that
   we've already processed has done.  We keep two copies of the SP equivalence,
   one for use during the insn we are processing and one for use in the next
   insn.  The difference is because one part of a PARALLEL may adjust SP
   and the other may use it.  */

struct epi_info
{
  rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
  rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
				   should be set to once we no longer need
				   its value.  */
  rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
					     for registers.  */
};

static void handle_epilogue_set (rtx, struct epi_info *);
static void update_epilogue_consts (rtx, rtx, void *);
static void emit_equiv_load (struct epi_info *);
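/* For illustration only (not part of the original source): for an
   epilogue of the form

       (set (reg fp) (mem (plus (reg sp) (const_int 4))))
       (set (reg sp) (plus (reg sp) (const_int 8)))
       (return)

   the pass starts with sp_equiv_reg == stack_pointer_rtx and
   sp_offset == 0.  The first insn only reads SP, so it is emitted
   unchanged.  The second insn would modify SP, so it is dropped and
   the tracking becomes sp_offset == 8; any later reference to SP is
   rewritten as (plus (reg sp) (const_int 8)).  If the incoming return
   address lives on the stack, the RETURN is finally replaced by an
   indirect jump through that (rewritten) address.  */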
/* Modify INSNS, a list of one or more insns that is part of the epilogue,
   so that it makes no modifications to the stack pointer.  Return the new
   list of insns.  */

static rtx
keep_stack_depressed (rtx insns)
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */
  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;

  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
    info.const_equiv[j] = 0;

  insn = insns;
  next = NULL_RTX;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
	{
	  add_insn (insn);
	  insn = next;
	  continue;
	}

      /* If this insn references the register that SP is equivalent to and
	 we have a pending load to that register, we must force out the load
	 first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
	{
	  emit_equiv_load (&info);
	  info.sp_equiv_reg = 0;
	}

      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;

      /* If this is a (RETURN) and the return address is on the stack,
	 update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	{
	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
	  rtx base = 0;
	  HOST_WIDE_INT offset = 0;
	  rtx jump_insn, jump_set;

	  /* If the return address is in a register, we can emit the insn
	     unchanged.  Otherwise, it must be a MEM and we see what the
	     base register and offset are.  In any case, we have to emit any
	     pending load to the equivalent reg of SP, if any.  */
	  if (REG_P (retaddr))
	    {
	      emit_equiv_load (&info);
	      add_insn (insn);
	    }
	  else
	    {
	      rtx ret_ptr;

	      gcc_assert (MEM_P (retaddr));

	      ret_ptr = XEXP (retaddr, 0);

	      if (REG_P (ret_ptr))
		{
		  base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
		  offset = 0;
		}
	      else
		{
		  gcc_assert (GET_CODE (ret_ptr) == PLUS
			      && REG_P (XEXP (ret_ptr, 0))
			      && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
		  base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
		  offset = INTVAL (XEXP (ret_ptr, 1));
		}

	      /* If the base of the location containing the return pointer
		 is SP, we must update it with the replacement address.
		 Otherwise, just build the necessary MEM.  */
	      retaddr = plus_constant (base, offset);
	      if (base == stack_pointer_rtx)
		retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
						plus_constant (info.sp_equiv_reg,
							       info.sp_offset));

	      retaddr = gen_rtx_MEM (Pmode, retaddr);

	      /* If there is a pending load to the equivalent register for SP
		 and we reference that register, we must load our address into
		 a scratch register and then do that load.  */
	      if (info.equiv_reg_src
		  && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
		{
		  unsigned int regno;
		  rtx reg;

		  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		    if (HARD_REGNO_MODE_OK (regno, Pmode)
			&& !fixed_regs[regno]
			&& TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
			&& !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
					     regno)
			&& !refers_to_regno_p (regno,
					       regno + hard_regno_nregs[regno]
								       [Pmode],
					       info.equiv_reg_src, NULL)
			&& info.const_equiv[regno] == 0)
		      break;

		  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

		  reg = gen_rtx_REG (Pmode, regno);
		  emit_move_insn (reg, retaddr);
		  retaddr = reg;
		}

	      emit_equiv_load (&info);
	      jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

	      /* Show the SET in the above insn is a RETURN.  */
	      jump_set = single_set (jump_insn);
	      gcc_assert (jump_set);
	      SET_IS_RETURN_P (jump_set) = 1;
	    }
	}

      /* If SP is not mentioned in the pattern and its equivalent register, if
	 any, is not modified, just emit it.  Otherwise, if neither is set,
	 replace the reference to SP and emit the insn.  If none of those are
	 true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	{
	  int changed;

	  changed = validate_replace_rtx (stack_pointer_rtx,
					  plus_constant (info.sp_equiv_reg,
							 info.sp_offset),
					  insn);
	  gcc_assert (changed);

	  add_insn (insn);
	}
      else if (GET_CODE (PATTERN (insn)) == SET)
	handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	{
	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
	}
      else
	add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      /* Now update any constants this insn sets.  */
      note_stores (PATTERN (insn), update_epilogue_consts, &info);
      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (rtx set, struct epi_info *p)
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      gcc_assert (SET_DEST (set) == stack_pointer_rtx);

      if (GET_CODE (SET_SRC (set)) == PLUS)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	    p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	  else
	    {
	      gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
			  && (REGNO (XEXP (SET_SRC (set), 1))
			      < FIRST_PSEUDO_REGISTER)
			  && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
	      p->new_sp_offset
		= INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
	    }
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  We must allow for the case
     where the register is being set in a different (usually wider) mode than
     Pmode.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      gcc_assert (!p->equiv_reg_src
		  && REG_P (p->new_sp_equiv_reg)
		  && REG_P (SET_DEST (set))
		  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
		      <= BITS_PER_WORD)
		  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
      p->equiv_reg_src
	= simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				plus_constant (p->sp_equiv_reg,
					       p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn to its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}
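/* For illustration only (not part of the original source): the
   const_equiv tracking lets a two-insn SP adjustment such as

       (set (reg r1) (const_int 64))
       (set (reg sp) (plus (reg sp) (reg r1)))

   be handled.  update_epilogue_consts records const_equiv[r1] = 64
   after the first SET, so when the second SET reaches
   handle_epilogue_set, the PLUS whose second operand is a register
   still yields a known new_sp_offset of 64 (added to the old
   sp_offset, since the base of the PLUS is SP itself).  */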
/* Update the tracking information for registers set to constants.  */

static void
update_epilogue_consts (rtx dest, rtx x, void *data)
{
  struct epi_info *p = (struct epi_info *) data;
  rtx new;

  if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    return;

  /* If we are either clobbering a register or doing a partial set,
     show we don't know the value.  */
  else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
    p->const_equiv[REGNO (dest)] = 0;

  /* If we are setting it to a constant, record that constant.  */
  else if (GET_CODE (SET_SRC (x)) == CONST_INT)
    p->const_equiv[REGNO (dest)] = SET_SRC (x);

  /* If this is a binary operation between a register we have been tracking
     and a constant, see if we can compute a new constant value.  */
  else if (ARITHMETIC_P (SET_SRC (x))
	   && REG_P (XEXP (SET_SRC (x), 0))
	   && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
	   && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
	   && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	   && 0 != (new = simplify_binary_operation
		    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
		     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
		     XEXP (SET_SRC (x), 1)))
	   && GET_CODE (new) == CONST_INT)
    p->const_equiv[REGNO (dest)] = new;

  /* Otherwise, we can't do anything with this value.  */
  else
    p->const_equiv[REGNO (dest)] = 0;
}
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (struct epi_info *p)
{
  if (p->equiv_reg_src != 0)
    {
      rtx dest = p->sp_equiv_reg;

      if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
	dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
			    REGNO (p->sp_equiv_reg));

      emit_move_insn (dest, p->equiv_reg_src);
      p->equiv_reg_src = 0;
    }
}
#endif
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* Can't deal with multiple successors of the entry block
	 at the moment.  Function should always have at least one
	 entry point.  */
      gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);

      insert_insn_on_edge (seq, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */

      basic_block last;
      rtx label;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (BB_HEAD (last) == label && LABEL_P (label))
	{
	  edge_iterator ei2;
	  rtx epilogue_line_note = NULL_RTX;

	  /* Locate the line number associated with the closing brace,
	     if we can find one.  */
	  for (seq = get_last_insn ();
	       seq && ! active_insn_p (seq);
	       seq = PREV_INSN (seq))
	    if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
	      {
		epilogue_line_note = seq;
		break;
	      }

	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      if (bb == ENTRY_BLOCK_PTR)
		{
		  ei_next (&ei2);
		  continue;
		}

	      jump = BB_END (bb);
	      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
		{
		  ei_next (&ei2);
		  continue;
		}

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb, epilogue_line_note);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  if (! redirect_jump (jump, 0, 0))
		    {
		      ei_next (&ei2);
		      continue;
		    }

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (EDGE_COUNT (bb->succs) == 1)
		    {
		      ei_next (&ei2);
		      continue;
		    }
		}
	      else
		{
		  ei_next (&ei2);
		  continue;
		}

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (BB_END (last));
	  emit_return_into_block (last, epilogue_line_note);
	  epilogue_end = BB_END (last);
	  EDGE_SUCC (last, 0)->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif

  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
	if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
	  cur_bb->rbi->next = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx i;
      rtx newinsn;

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn))
	{
	  ei_next (&ei);
	  continue;
	}

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif

#ifdef HAVE_prologue
  /* This is probably all useless now that we use locators.  */
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note after the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) we should generate a
	 note before the end of the first basic block, if there isn't
	 one already there.

	 ??? This behavior is completely broken when dealing with
	 multiple entry functions.  We simply place the note always
	 into first basic block and let alternate entry points
	 to be missed.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  */
	      if (prev == NULL)
		break;
	      reorder_insns (insn, insn, prologue_end);
	    }
	}

      /* Find the last line number note in the first block.  */
      for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
	   insn != prologue_end && insn;
	   insn = PREV_INSN (insn))
	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
	  break;

      /* If we didn't find one, make a copy of the first line number
	 we run across.  */
      if (! insn)
	{
	  for (insn = next_active_insn (prologue_end);
	       insn;
	       insn = PREV_INSN (insn))
	    if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
	      {
		emit_note_copy_after (insn, prologue_end);
		break;
	      }
	}
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_LINE_NUMBER (insn) > 0
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the prologue-end note if we haven't already, and
	     move it to just after the last prologue insn.  */
	  if (note == 0)
	    {
	      for (note = last; (note = NEXT_INSN (note));)
		if (NOTE_P (note)
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		  break;
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if (contains (insn, epilogue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the epilogue-begin note if we haven't already, and
	     move it to just before the first epilogue insn.  */
	  if (note == 0)
	    {
	      for (note = insn; (note = PREV_INSN (note));)
		if (NOTE_P (note)
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		  break;
	    }

	  if (PREV_INSN (last) != note)
	    reorder_insns (note, note, PREV_INSN (last));
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once (void)
{
  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}
/* Resets insn_block_boundaries array.  */

void
reset_block_changes (void)
{
  VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
}
/* Record the boundary for BLOCK.  */

void
record_block_change (tree block)
{
  int i, n;
  tree last_block;

  if (!block)
    return;

  last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
  VARRAY_POP (cfun->ib_boundaries_block);
  n = get_max_uid ();
  for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
    VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);

  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
}
/* Finishes record of boundaries.  */

void
finalize_block_changes (void)
{
  record_block_change (DECL_INITIAL (current_function_decl));
}
/* For INSN return the BLOCK it belongs to.  */

void
check_block_change (rtx insn, tree *block)
{
  unsigned uid = INSN_UID (insn);

  if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
    return;

  *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
}
/* Releases the ib_boundaries_block records.  */

void
free_block_changes (void)
{
  cfun->ib_boundaries_block = NULL;
}
/* Returns the name of the current function.  */

const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

#include "gt-function.h"