/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "integrate.h"
#include "langhooks.h"
#include "cfglayout.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
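
/* As a quick illustration of the bit trick (assuming a power-of-two
   alignment of 4): FLOOR_ROUND (-5, 4) masks off the low bits and yields -8,
   while CEIL_ROUND (5, 4) first adds 3 and then masks, yielding 8; the first
   rounds down and the second rounds up, with no division involved.  */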
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
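
/* For instance, given a GNU C statement expression such as
   ({ struct S tmp = some_call (); tmp; }) (with some_call standing in for
   any function that returns a structure in memory), the value of the whole
   grouping may live in one of these temporaries, so it must survive past
   the end of the statement that created it.  */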
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (rtx, VEC(int,heap) **);
static void emit_return_into_block (basic_block, rtx);
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
  struct function *p;

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_stack_check_probe_note = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
  if (FRAME_GROWS_DOWNWARD)
    return -f->x_frame_offset;
  else
    return f->x_frame_offset;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         type.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    function->x_frame_offset -= size;

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
        function->x_frame_offset
          = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                          (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
      else
        function->x_frame_offset
          = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                         (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);
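
  /* For instance (illustrative numbers only): on a big-endian target, an
     8-byte slot holding a 4-byte SImode value gets bigend_correction = 4,
     so the address computed below points at the last four bytes of the
     slot, which are the least significant ones in big-endian order.  */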
  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    function->x_frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (x) = 1;

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (frame_offset_overflow (function->x_frame_offset, function->decl))
    function->x_frame_offset = 0;

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;

  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    {
      size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
      temp_slot_p *p;

      VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
      p = VEC_address (temp_slot_p, used_temp_slots);
      memset (&p[old_length], 0,
              sizeof (temp_slot_p) * (level + 1 - old_length));
    }

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */
static rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }
  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   variables.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

static void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location.
     If so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when a ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
 + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
        {
          *loc = plus_constant (new, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
        {
          new = plus_constant (new, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new);
          if (x != new)
            emit_move_insn (new, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && GET_CODE (recog_data.operand[2]) == CONST_INT
          && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new = instantiate_new_reg (x, &offset);
          if (new == NULL)
            continue;
          if (offset == 0)
            x = new;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is a valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new = expand_simple_binop (GET_MODE (new), PLUS, new,
                                         GEN_INT (offset), NULL_RTX,
                                         1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new,
                                   GET_MODE (new), SUBREG_BYTE (x));
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = recog_data.operand[(unsigned)recog_data.dup_num[i]];

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl (XEXP (x, 0));
      instantiate_decl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
        instantiate_decl (DECL_RTL (t));
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl (DECL_RTL (decl));
      instantiate_decl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;

        instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}
struct tree_opt_pass pass_instantiate_virtual_regs =
{
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (tree exp, tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        fndecl = get_callee_fndecl (fntype);
        fntype = fndecl ? TREE_TYPE (fndecl) : 0;
        break;
      case FUNCTION_DECL:
        fndecl = fntype;
        fntype = TREE_TYPE (fndecl);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = 0;
        break;
      default:
        /* We don't expect other rtl types here.  */
        gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (tree decl)
{
  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  return (optimize || DECL_REGISTER (decl));
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx conversion_insns;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static tree
split_complex_args (tree args)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return args;

 found:
  args = copy_list (args);

  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);

          /* Build a second synthetic decl.  */
          decl = build_decl (PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);

          /* Splice it in; skip the new decl.  */
          TREE_CHAIN (decl) = TREE_CHAIN (p);
          TREE_CHAIN (p) = decl;
          p = decl;
        }
    }

  return args;
}
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static tree
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! current_function_returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      TREE_CHAIN (decl) = fnargs;
      fnargs = decl;
      all->function_result_decl = decl;
    }

  all->orig_fnargs = fnargs;

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!current_function_stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (TREE_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union, use the type of
     the first field for the tests below.  We have already verified that
     the modes are the same.  */
  if (TREE_CODE (passed_type) == UNION_TYPE
      && TYPE_TRANSPARENT_UNION (passed_type))
    passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  promoted_mode = passed_mode;
  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
    {
      int unsignedp = TYPE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode,
                                    &unsignedp, 1);
    }

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
2133 assign_parms_setup_varargs (struct assign_parm_data_all
*all
,
2134 struct assign_parm_data_one
*data
, bool no_rtl
)
2136 int varargs_pretend_bytes
= 0;
2138 targetm
.calls
.setup_incoming_varargs (&all
->args_so_far
,
2139 data
->promoted_mode
,
2141 &varargs_pretend_bytes
, no_rtl
);
2143 /* If the back-end has requested extra stack space, record how much is
2144 needed. Do not change pretend_args_size otherwise since it may be
2145 nonzero from an earlier partial argument. */
2146 if (varargs_pretend_bytes
> 0)
2147 all
->pretend_args_size
= varargs_pretend_bytes
;
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

#ifdef FUNCTION_INCOMING_ARG
  entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
                                      data->passed_type, data->named_arg);
#else
  entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
                             data->passed_type, data->named_arg);
#endif

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
        {
          rtx tem;
#ifdef FUNCTION_INCOMING_ARG
          tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
                                       data->passed_type, true);
#else
          tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
                              data->passed_type, true);
#endif
          in_regs = tem != NULL;
        }
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
                                        data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
                                                 data->promoted_mode,
                                                 data->passed_type,
                                                 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
         register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
        {
          /* Part of this argument is passed in registers and part
             is passed on the stack.  Ask the prologue code to extend
             the stack part so that we can recreate the full value.

             PRETEND_BYTES is the size of the registers we need to store.
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
             stack space that the prologue should allocate.

             Internally, gcc assumes that the argument pointer is aligned
             to STACK_BOUNDARY bits.  This is used both for alignment
             optimizations (see init_emit) and to locate arguments that are
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
             a stack boundary.  */

          /* We assume at most one partial arg, and it must be the first
             argument on the stack.  */
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

          pretend_bytes = partial;
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

          /* We want to align relative to the actual stack pointer, so
             don't include this in the stack size until later.  */
          all->extra_pretend_bytes = all->pretend_args_size;
        }
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
                       entry_parm ? data->partial : 0, current_function_decl,
                       &all->stack_args_size, &data->locate);

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
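
/* Illustrative sketch, not part of the original code: a tiny helper
   showing how the CEIL_ROUND use above keeps the pretend-args size
   aligned to a full stack slot.  The helper's name is hypothetical; it
   relies only on the CEIL_ROUND and STACK_BYTES macros defined near the
   top of this file.  */
static inline HOST_WIDE_INT
example_pretend_args_size (HOST_WIDE_INT partial_bytes)
{
  /* E.g. with STACK_BYTES == 16, 12 partial bytes round up to 16.  */
  return CEIL_ROUND (partial_bytes, STACK_BYTES);
}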
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = current_function_internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  set_mem_attributes (stack_parm, parm, 1);

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (GET_CODE (offset_rtx) == CONST_INT)
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
         locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        emit_group_store (validize_mem (stack_parm), entry_parm,
                          data->passed_type,
                          int_size_in_bytes (data->passed_type));
      else
        {
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
          move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
                               data->partial / UNITS_PER_WORD);
        }

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
           && data->nominal_mode != BLKmode
           && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                == data->passed_mode)
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
          {
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
            break;
          }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
          || (data->nominal_type
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
           && data->nominal_mode != BLKmode
           && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (cfun->stack_protect_guard
           && (flag_stack_protect == 2
               || data->passed_pointer
               || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_CODE (data->entry_parm) == PARALLEL)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
2469 /* A subroutine of assign_parms. Arrange for the parameter to be
2470 present and valid in DATA->STACK_RTL. */
2473 assign_parm_setup_block (struct assign_parm_data_all
*all
,
2474 tree parm
, struct assign_parm_data_one
*data
)
2476 rtx entry_parm
= data
->entry_parm
;
2477 rtx stack_parm
= data
->stack_parm
;
2479 HOST_WIDE_INT size_stored
;
2480 rtx orig_entry_parm
= entry_parm
;
2482 if (GET_CODE (entry_parm
) == PARALLEL
)
2483 entry_parm
= emit_group_move_into_temps (entry_parm
);
2485 /* If we've a non-block object that's nevertheless passed in parts,
2486 reconstitute it in register operations rather than on the stack. */
2487 if (GET_CODE (entry_parm
) == PARALLEL
2488 && data
->nominal_mode
!= BLKmode
)
2490 rtx elt0
= XEXP (XVECEXP (orig_entry_parm
, 0, 0), 0);
2492 if ((XVECLEN (entry_parm
, 0) > 1
2493 || hard_regno_nregs
[REGNO (elt0
)][GET_MODE (elt0
)] > 1)
2494 && use_register_for_decl (parm
))
2496 rtx parmreg
= gen_reg_rtx (data
->nominal_mode
);
2498 push_to_sequence (all
->conversion_insns
);
2500 /* For values returned in multiple registers, handle possible
2501 incompatible calls to emit_group_store.
2503 For example, the following would be invalid, and would have to
2504 be fixed by the conditional below:
2506 emit_group_store ((reg:SF), (parallel:DF))
2507 emit_group_store ((reg:SI), (parallel:DI))
2509 An example of this are doubles in e500 v2:
2510 (parallel:DF (expr_list (reg:SI) (const_int 0))
2511 (expr_list (reg:SI) (const_int 4))). */
2512 if (data
->nominal_mode
!= data
->passed_mode
)
2514 rtx t
= gen_reg_rtx (GET_MODE (entry_parm
));
2515 emit_group_store (t
, entry_parm
, NULL_TREE
,
2516 GET_MODE_SIZE (GET_MODE (entry_parm
)));
2517 convert_move (parmreg
, t
, 0);
2520 emit_group_store (parmreg
, entry_parm
, data
->nominal_type
,
2521 int_size_in_bytes (data
->nominal_type
));
2523 all
->conversion_insns
= get_insns ();
2526 SET_DECL_RTL (parm
, parmreg
);
2531 size
= int_size_in_bytes (data
->passed_type
);
2532 size_stored
= CEIL_ROUND (size
, UNITS_PER_WORD
);
2533 if (stack_parm
== 0)
2535 DECL_ALIGN (parm
) = MAX (DECL_ALIGN (parm
), BITS_PER_WORD
);
2536 stack_parm
= assign_stack_local (BLKmode
, size_stored
,
2538 if (GET_MODE_SIZE (GET_MODE (entry_parm
)) == size
)
2539 PUT_MODE (stack_parm
, GET_MODE (entry_parm
));
2540 set_mem_attributes (stack_parm
, parm
, 1);
2543 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2544 calls that pass values in multiple non-contiguous locations. */
2545 if (REG_P (entry_parm
) || GET_CODE (entry_parm
) == PARALLEL
)
2549 /* Note that we will be storing an integral number of words.
2550 So we have to be careful to ensure that we allocate an
2551 integral number of words. We do this above when we call
2552 assign_stack_local if space was not allocated in the argument
2553 list. If it was, this will not work if PARM_BOUNDARY is not
2554 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2555 if it becomes a problem. Exception is when BLKmode arrives
2556 with arguments not conforming to word_mode. */
2558 if (data
->stack_parm
== 0)
2560 else if (GET_CODE (entry_parm
) == PARALLEL
)
2563 gcc_assert (!size
|| !(PARM_BOUNDARY
% BITS_PER_WORD
));
2565 mem
= validize_mem (stack_parm
);
2567 /* Handle values in multiple non-contiguous locations. */
2568 if (GET_CODE (entry_parm
) == PARALLEL
)
2570 push_to_sequence (all
->conversion_insns
);
2571 emit_group_store (mem
, entry_parm
, data
->passed_type
, size
);
2572 all
->conversion_insns
= get_insns ();
2579 /* If SIZE is that of a mode no bigger than a word, just use
2580 that mode's store operation. */
2581 else if (size
<= UNITS_PER_WORD
)
2583 enum machine_mode mode
2584 = mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0);
2587 #ifdef BLOCK_REG_PADDING
2588 && (size
== UNITS_PER_WORD
2589 || (BLOCK_REG_PADDING (mode
, data
->passed_type
, 1)
2590 != (BYTES_BIG_ENDIAN
? upward
: downward
)))
2594 rtx reg
= gen_rtx_REG (mode
, REGNO (entry_parm
));
2595 emit_move_insn (change_address (mem
, mode
, 0), reg
);
2598 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2599 machine must be aligned to the left before storing
2600 to memory. Note that the previous test doesn't
2601 handle all cases (e.g. SIZE == 3). */
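          /* Worked example (hypothetical target numbers): with
             UNITS_PER_WORD == 4 and SIZE == 3, BY below is
             (4 - 3) * BITS_PER_UNIT == 8, so the three meaningful bytes
             are shifted to the most significant end of the word, which a
             big-endian store then places at the lowest addresses of the
             stack slot.  */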
2602 else if (size
!= UNITS_PER_WORD
2603 #ifdef BLOCK_REG_PADDING
2604 && (BLOCK_REG_PADDING (mode
, data
->passed_type
, 1)
2612 int by
= (UNITS_PER_WORD
- size
) * BITS_PER_UNIT
;
2613 rtx reg
= gen_rtx_REG (word_mode
, REGNO (entry_parm
));
2615 x
= expand_shift (LSHIFT_EXPR
, word_mode
, reg
,
2616 build_int_cst (NULL_TREE
, by
),
2618 tem
= change_address (mem
, word_mode
, 0);
2619 emit_move_insn (tem
, x
);
2622 move_block_from_reg (REGNO (entry_parm
), mem
,
2623 size_stored
/ UNITS_PER_WORD
);
2626 move_block_from_reg (REGNO (entry_parm
), mem
,
2627 size_stored
/ UNITS_PER_WORD
);
2629 else if (data
->stack_parm
== 0)
2631 push_to_sequence (all
->conversion_insns
);
2632 emit_block_move (stack_parm
, data
->entry_parm
, GEN_INT (size
),
2634 all
->conversion_insns
= get_insns ();
2638 data
->stack_parm
= stack_parm
;
2639 SET_DECL_RTL (parm
, stack_parm
);
2642 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2643 parameter. Get it there. Perform all ABI specified conversions. */
2646 assign_parm_setup_reg (struct assign_parm_data_all
*all
, tree parm
,
2647 struct assign_parm_data_one
*data
)
2650 enum machine_mode promoted_nominal_mode
;
2651 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (parm
));
2652 bool did_conversion
= false;
2654 /* Store the parm in a pseudoregister during the function, but we may
2655 need to do it in a wider mode. */
2657 /* This is not really promoting for a call. However we need to be
2658 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2659 promoted_nominal_mode
2660 = promote_mode (data
->nominal_type
, data
->nominal_mode
, &unsignedp
, 1);
2662 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
2664 if (!DECL_ARTIFICIAL (parm
))
2665 mark_user_reg (parmreg
);
2667 /* If this was an item that we received a pointer to,
2668 set DECL_RTL appropriately. */
2669 if (data
->passed_pointer
)
2671 rtx x
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data
->passed_type
)), parmreg
);
2672 set_mem_attributes (x
, parm
, 1);
2673 SET_DECL_RTL (parm
, x
);
2676 SET_DECL_RTL (parm
, parmreg
);
2678 /* Copy the value into the register. */
2679 if (data
->nominal_mode
!= data
->passed_mode
2680 || promoted_nominal_mode
!= data
->promoted_mode
)
2684 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2685 mode, by the caller. We now have to convert it to
2686 NOMINAL_MODE, if different. However, PARMREG may be in
2687 a different mode than NOMINAL_MODE if it is being stored
2690 If ENTRY_PARM is a hard register, it might be in a register
2691 not valid for operating in its mode (e.g., an odd-numbered
2692 register for a DFmode). In that case, moves are the only
2693 thing valid, so we can't do a convert from there. This
2694 occurs when the calling sequence allow such misaligned
2697 In addition, the conversion may involve a call, which could
2698 clobber parameters which haven't been copied to pseudo
2699 registers yet. Therefore, we must first copy the parm to
2700 a pseudo reg here, and save the conversion until after all
2701 parameters have been moved. */
2703 rtx tempreg
= gen_reg_rtx (GET_MODE (data
->entry_parm
));
2705 emit_move_insn (tempreg
, validize_mem (data
->entry_parm
));
2707 push_to_sequence (all
->conversion_insns
);
2708 tempreg
= convert_to_mode (data
->nominal_mode
, tempreg
, unsignedp
);
2710 if (GET_CODE (tempreg
) == SUBREG
2711 && GET_MODE (tempreg
) == data
->nominal_mode
2712 && REG_P (SUBREG_REG (tempreg
))
2713 && data
->nominal_mode
== data
->passed_mode
2714 && GET_MODE (SUBREG_REG (tempreg
)) == GET_MODE (data
->entry_parm
)
2715 && GET_MODE_SIZE (GET_MODE (tempreg
))
2716 < GET_MODE_SIZE (GET_MODE (data
->entry_parm
)))
          /* The argument is already sign/zero extended, so note it
             into the subreg.  */
          SUBREG_PROMOTED_VAR_P (tempreg) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2724 /* TREE_USED gets set erroneously during expand_assignment. */
2725 save_tree_used
= TREE_USED (parm
);
2726 expand_assignment (parm
, make_tree (data
->nominal_type
, tempreg
));
2727 TREE_USED (parm
) = save_tree_used
;
2728 all
->conversion_insns
= get_insns ();
2731 did_conversion
= true;
2734 emit_move_insn (parmreg
, validize_mem (data
->entry_parm
));
2736 /* If we were passed a pointer but the actual value can safely live
2737 in a register, put it in one. */
2738 if (data
->passed_pointer
2739 && TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
2740 /* If by-reference argument was promoted, demote it. */
2741 && (TYPE_MODE (TREE_TYPE (parm
)) != GET_MODE (DECL_RTL (parm
))
2742 || use_register_for_decl (parm
)))
2744 /* We can't use nominal_mode, because it will have been set to
2745 Pmode above. We must use the actual mode of the parm. */
2746 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
2747 mark_user_reg (parmreg
);
2749 if (GET_MODE (parmreg
) != GET_MODE (DECL_RTL (parm
)))
2751 rtx tempreg
= gen_reg_rtx (GET_MODE (DECL_RTL (parm
)));
2752 int unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (parm
));
2754 push_to_sequence (all
->conversion_insns
);
2755 emit_move_insn (tempreg
, DECL_RTL (parm
));
2756 tempreg
= convert_to_mode (GET_MODE (parmreg
), tempreg
, unsigned_p
);
2757 emit_move_insn (parmreg
, tempreg
);
2758 all
->conversion_insns
= get_insns ();
2761 did_conversion
= true;
2764 emit_move_insn (parmreg
, DECL_RTL (parm
));
2766 SET_DECL_RTL (parm
, parmreg
);
2768 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2770 data
->stack_parm
= NULL
;
2773 /* Mark the register as eliminable if we did no conversion and it was
2774 copied from memory at a fixed offset, and the arg pointer was not
2775 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2776 offset formed an invalid address, such memory-equivalences as we
2777 make here would screw up life analysis for it. */
2778 if (data
->nominal_mode
== data
->passed_mode
2780 && data
->stack_parm
!= 0
2781 && MEM_P (data
->stack_parm
)
2782 && data
->locate
.offset
.var
== 0
2783 && reg_mentioned_p (virtual_incoming_args_rtx
,
2784 XEXP (data
->stack_parm
, 0)))
2786 rtx linsn
= get_last_insn ();
2789 /* Mark complex types separately. */
2790 if (GET_CODE (parmreg
) == CONCAT
)
2792 enum machine_mode submode
2793 = GET_MODE_INNER (GET_MODE (parmreg
));
2794 int regnor
= REGNO (XEXP (parmreg
, 0));
2795 int regnoi
= REGNO (XEXP (parmreg
, 1));
2796 rtx stackr
= adjust_address_nv (data
->stack_parm
, submode
, 0);
2797 rtx stacki
= adjust_address_nv (data
->stack_parm
, submode
,
2798 GET_MODE_SIZE (submode
));
2800 /* Scan backwards for the set of the real and
2802 for (sinsn
= linsn
; sinsn
!= 0;
2803 sinsn
= prev_nonnote_insn (sinsn
))
2805 set
= single_set (sinsn
);
2809 if (SET_DEST (set
) == regno_reg_rtx
[regnoi
])
2811 = gen_rtx_EXPR_LIST (REG_EQUIV
, stacki
,
2813 else if (SET_DEST (set
) == regno_reg_rtx
[regnor
])
2815 = gen_rtx_EXPR_LIST (REG_EQUIV
, stackr
,
2819 else if ((set
= single_set (linsn
)) != 0
2820 && SET_DEST (set
) == parmreg
)
2822 = gen_rtx_EXPR_LIST (REG_EQUIV
,
2823 data
->stack_parm
, REG_NOTES (linsn
));
2826 /* For pointer data type, suggest pointer register. */
2827 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
2828 mark_reg_pointer (parmreg
,
2829 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
))));
2832 /* A subroutine of assign_parms. Allocate stack space to hold the current
2833 parameter. Get it there. Perform all ABI specified conversions. */
2836 assign_parm_setup_stack (struct assign_parm_data_all
*all
, tree parm
,
2837 struct assign_parm_data_one
*data
)
2839 /* Value must be stored in the stack slot STACK_PARM during function
2841 bool to_conversion
= false;
2843 if (data
->promoted_mode
!= data
->nominal_mode
)
2845 /* Conversion is required. */
2846 rtx tempreg
= gen_reg_rtx (GET_MODE (data
->entry_parm
));
2848 emit_move_insn (tempreg
, validize_mem (data
->entry_parm
));
2850 push_to_sequence (all
->conversion_insns
);
2851 to_conversion
= true;
2853 data
->entry_parm
= convert_to_mode (data
->nominal_mode
, tempreg
,
2854 TYPE_UNSIGNED (TREE_TYPE (parm
)));
2856 if (data
->stack_parm
)
2857 /* ??? This may need a big-endian conversion on sparc64. */
2859 = adjust_address (data
->stack_parm
, data
->nominal_mode
, 0);
2862 if (data
->entry_parm
!= data
->stack_parm
)
2866 if (data
->stack_parm
== 0)
2869 = assign_stack_local (GET_MODE (data
->entry_parm
),
2870 GET_MODE_SIZE (GET_MODE (data
->entry_parm
)),
2871 TYPE_ALIGN (data
->passed_type
));
2872 set_mem_attributes (data
->stack_parm
, parm
, 1);
2875 dest
= validize_mem (data
->stack_parm
);
2876 src
= validize_mem (data
->entry_parm
);
2880 /* Use a block move to handle potentially misaligned entry_parm. */
2882 push_to_sequence (all
->conversion_insns
);
2883 to_conversion
= true;
2885 emit_block_move (dest
, src
,
2886 GEN_INT (int_size_in_bytes (data
->passed_type
)),
2890 emit_move_insn (dest
, src
);
2895 all
->conversion_insns
= get_insns ();
2899 SET_DECL_RTL (parm
, data
->stack_parm
);
2902 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2903 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2906 assign_parms_unsplit_complex (struct assign_parm_data_all
*all
, tree fnargs
)
2909 tree orig_fnargs
= all
->orig_fnargs
;
2911 for (parm
= orig_fnargs
; parm
; parm
= TREE_CHAIN (parm
))
2913 if (TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
2914 && targetm
.calls
.split_complex_arg (TREE_TYPE (parm
)))
2916 rtx tmp
, real
, imag
;
2917 enum machine_mode inner
= GET_MODE_INNER (DECL_MODE (parm
));
2919 real
= DECL_RTL (fnargs
);
2920 imag
= DECL_RTL (TREE_CHAIN (fnargs
));
2921 if (inner
!= GET_MODE (real
))
2923 real
= gen_lowpart_SUBREG (inner
, real
);
2924 imag
= gen_lowpart_SUBREG (inner
, imag
);
2927 if (TREE_ADDRESSABLE (parm
))
2930 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (parm
));
2932 /* split_complex_arg put the real and imag parts in
2933 pseudos. Move them to memory. */
2934 tmp
= assign_stack_local (DECL_MODE (parm
), size
,
2935 TYPE_ALIGN (TREE_TYPE (parm
)));
2936 set_mem_attributes (tmp
, parm
, 1);
2937 rmem
= adjust_address_nv (tmp
, inner
, 0);
2938 imem
= adjust_address_nv (tmp
, inner
, GET_MODE_SIZE (inner
));
2939 push_to_sequence (all
->conversion_insns
);
2940 emit_move_insn (rmem
, real
);
2941 emit_move_insn (imem
, imag
);
2942 all
->conversion_insns
= get_insns ();
2946 tmp
= gen_rtx_CONCAT (DECL_MODE (parm
), real
, imag
);
2947 SET_DECL_RTL (parm
, tmp
);
2949 real
= DECL_INCOMING_RTL (fnargs
);
2950 imag
= DECL_INCOMING_RTL (TREE_CHAIN (fnargs
));
2951 if (inner
!= GET_MODE (real
))
2953 real
= gen_lowpart_SUBREG (inner
, real
);
2954 imag
= gen_lowpart_SUBREG (inner
, imag
);
2956 tmp
= gen_rtx_CONCAT (DECL_MODE (parm
), real
, imag
);
2957 set_decl_incoming_rtl (parm
, tmp
);
2958 fnargs
= TREE_CHAIN (fnargs
);
2962 SET_DECL_RTL (parm
, DECL_RTL (fnargs
));
2963 set_decl_incoming_rtl (parm
, DECL_INCOMING_RTL (fnargs
));
2965 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2966 instead of the copy of decl, i.e. FNARGS. */
2967 if (DECL_INCOMING_RTL (parm
) && MEM_P (DECL_INCOMING_RTL (parm
)))
2968 set_mem_expr (DECL_INCOMING_RTL (parm
), parm
);
2971 fnargs
= TREE_CHAIN (fnargs
);
2975 /* Assign RTL expressions to the function's parameters. This may involve
2976 copying them into registers and using those registers as the DECL_RTL. */
2979 assign_parms (tree fndecl
)
2981 struct assign_parm_data_all all
;
2984 current_function_internal_arg_pointer
2985 = targetm
.calls
.internal_arg_pointer ();
2987 assign_parms_initialize_all (&all
);
2988 fnargs
= assign_parms_augmented_arg_list (&all
);
2990 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
2992 struct assign_parm_data_one data
;
2994 /* Extract the type of PARM; adjust it according to ABI. */
2995 assign_parm_find_data_types (&all
, parm
, &data
);
2997 /* Early out for errors and void parameters. */
2998 if (data
.passed_mode
== VOIDmode
)
3000 SET_DECL_RTL (parm
, const0_rtx
);
3001 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
);
3005 if (current_function_stdarg
&& !TREE_CHAIN (parm
))
3006 assign_parms_setup_varargs (&all
, &data
, false);
3008 /* Find out where the parameter arrives in this function. */
3009 assign_parm_find_entry_rtl (&all
, &data
);
3011 /* Find out where stack space for this parameter might be. */
3012 if (assign_parm_is_stack_parm (&all
, &data
))
3014 assign_parm_find_stack_rtl (parm
, &data
);
3015 assign_parm_adjust_entry_rtl (&data
);
3018 /* Record permanently how this parm was passed. */
3019 set_decl_incoming_rtl (parm
, data
.entry_parm
);
3021 /* Update info on where next arg arrives in registers. */
3022 FUNCTION_ARG_ADVANCE (all
.args_so_far
, data
.promoted_mode
,
3023 data
.passed_type
, data
.named_arg
);
3025 assign_parm_adjust_stack_rtl (&data
);
3027 if (assign_parm_setup_block_p (&data
))
3028 assign_parm_setup_block (&all
, parm
, &data
);
3029 else if (data
.passed_pointer
|| use_register_for_decl (parm
))
3030 assign_parm_setup_reg (&all
, parm
, &data
);
3032 assign_parm_setup_stack (&all
, parm
, &data
);
3035 if (targetm
.calls
.split_complex_arg
&& fnargs
!= all
.orig_fnargs
)
3036 assign_parms_unsplit_complex (&all
, fnargs
);
3038 /* Output all parameter conversion instructions (possibly including calls)
3039 now that all parameters have been copied out of hard registers. */
3040 emit_insn (all
.conversion_insns
);
3042 /* If we are receiving a struct value address as the first argument, set up
3043 the RTL for the function result. As this might require code to convert
3044 the transmitted address to Pmode, we do this here to ensure that possible
3045 preliminary conversions of the address have been emitted already. */
3046 if (all
.function_result_decl
)
3048 tree result
= DECL_RESULT (current_function_decl
);
3049 rtx addr
= DECL_RTL (all
.function_result_decl
);
3052 if (DECL_BY_REFERENCE (result
))
3056 addr
= convert_memory_address (Pmode
, addr
);
3057 x
= gen_rtx_MEM (DECL_MODE (result
), addr
);
3058 set_mem_attributes (x
, result
, 1);
3060 SET_DECL_RTL (result
, x
);
3063 /* We have aligned all the args, so add space for the pretend args. */
3064 current_function_pretend_args_size
= all
.pretend_args_size
;
3065 all
.stack_args_size
.constant
+= all
.extra_pretend_bytes
;
3066 current_function_args_size
= all
.stack_args_size
.constant
;
3068 /* Adjust function incoming argument size for alignment and
3071 #ifdef REG_PARM_STACK_SPACE
3072 current_function_args_size
= MAX (current_function_args_size
,
3073 REG_PARM_STACK_SPACE (fndecl
));
3076 current_function_args_size
= CEIL_ROUND (current_function_args_size
,
3077 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3079 #ifdef ARGS_GROW_DOWNWARD
3080 current_function_arg_offset_rtx
3081 = (all
.stack_args_size
.var
== 0 ? GEN_INT (-all
.stack_args_size
.constant
)
3082 : expand_expr (size_diffop (all
.stack_args_size
.var
,
3083 size_int (-all
.stack_args_size
.constant
)),
3084 NULL_RTX
, VOIDmode
, 0));
3086 current_function_arg_offset_rtx
= ARGS_SIZE_RTX (all
.stack_args_size
);
3089 /* See how many bytes, if any, of its args a function should try to pop
3092 current_function_pops_args
= RETURN_POPS_ARGS (fndecl
, TREE_TYPE (fndecl
),
3093 current_function_args_size
);
3095 /* For stdarg.h function, save info about
3096 regs and stack space used by the named args. */
3098 current_function_args_info
= all
.args_so_far
;
3100 /* Set the rtx used for the function return value. Put this in its
3101 own variable so any optimizers that need this information don't have
3102 to include tree.h. Do this here so it gets done when an inlined
3103 function gets output. */
3105 current_function_return_rtx
3106 = (DECL_RTL_SET_P (DECL_RESULT (fndecl
))
3107 ? DECL_RTL (DECL_RESULT (fndecl
)) : NULL_RTX
);
3109 /* If scalar return value was computed in a pseudo-reg, or was a named
3110 return value that got dumped to the stack, copy that to the hard
3112 if (DECL_RTL_SET_P (DECL_RESULT (fndecl
)))
3114 tree decl_result
= DECL_RESULT (fndecl
);
3115 rtx decl_rtl
= DECL_RTL (decl_result
);
3117 if (REG_P (decl_rtl
)
3118 ? REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
3119 : DECL_REGISTER (decl_result
))
3123 real_decl_rtl
= targetm
.calls
.function_value (TREE_TYPE (decl_result
),
3125 REG_FUNCTION_VALUE_P (real_decl_rtl
) = 1;
3126 /* The delay slot scheduler assumes that current_function_return_rtx
3127 holds the hard register containing the return value, not a
3128 temporary pseudo. */
3129 current_function_return_rtx
= real_decl_rtl
;
3134 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3135 For all seen types, gimplify their sizes. */
3138 gimplify_parm_type (tree
*tp
, int *walk_subtrees
, void *data
)
3145 if (POINTER_TYPE_P (t
))
3147 else if (TYPE_SIZE (t
) && !TREE_CONSTANT (TYPE_SIZE (t
))
3148 && !TYPE_SIZES_GIMPLIFIED (t
))
3150 gimplify_type_sizes (t
, (tree
*) data
);
3158 /* Gimplify the parameter list for current_function_decl. This involves
3159 evaluating SAVE_EXPRs of variable sized parameters and generating code
3160 to implement callee-copies reference parameters. Returns a list of
3161 statements to add to the beginning of the function, or NULL if nothing
3165 gimplify_parameters (void)
3167 struct assign_parm_data_all all
;
3168 tree fnargs
, parm
, stmts
= NULL
;
3170 assign_parms_initialize_all (&all
);
3171 fnargs
= assign_parms_augmented_arg_list (&all
);
3173 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
3175 struct assign_parm_data_one data
;
3177 /* Extract the type of PARM; adjust it according to ABI. */
3178 assign_parm_find_data_types (&all
, parm
, &data
);
3180 /* Early out for errors and void parameters. */
3181 if (data
.passed_mode
== VOIDmode
|| DECL_SIZE (parm
) == NULL
)
3184 /* Update info on where next arg arrives in registers. */
3185 FUNCTION_ARG_ADVANCE (all
.args_so_far
, data
.promoted_mode
,
3186 data
.passed_type
, data
.named_arg
);
3188 /* ??? Once upon a time variable_size stuffed parameter list
3189 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3190 turned out to be less than manageable in the gimple world.
3191 Now we have to hunt them down ourselves. */
3192 walk_tree_without_duplicates (&data
.passed_type
,
3193 gimplify_parm_type
, &stmts
);
3195 if (!TREE_CONSTANT (DECL_SIZE (parm
)))
3197 gimplify_one_sizepos (&DECL_SIZE (parm
), &stmts
);
3198 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm
), &stmts
);
3201 if (data
.passed_pointer
)
3203 tree type
= TREE_TYPE (data
.passed_type
);
3204 if (reference_callee_copied (&all
.args_so_far
, TYPE_MODE (type
),
3205 type
, data
.named_arg
))
3209 /* For constant sized objects, this is trivial; for
3210 variable-sized objects, we have to play games. */
3211 if (TREE_CONSTANT (DECL_SIZE (parm
)))
3213 local
= create_tmp_var (type
, get_name (parm
));
3214 DECL_IGNORED_P (local
) = 0;
3218 tree ptr_type
, addr
, args
;
3220 ptr_type
= build_pointer_type (type
);
3221 addr
= create_tmp_var (ptr_type
, get_name (parm
));
3222 DECL_IGNORED_P (addr
) = 0;
3223 local
= build_fold_indirect_ref (addr
);
3225 args
= tree_cons (NULL
, DECL_SIZE_UNIT (parm
), NULL
);
3226 t
= built_in_decls
[BUILT_IN_ALLOCA
];
3227 t
= build_function_call_expr (t
, args
);
3228 t
= fold_convert (ptr_type
, t
);
3229 t
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
3230 gimplify_and_add (t
, &stmts
);
3233 t
= build2 (MODIFY_EXPR
, void_type_node
, local
, parm
);
3234 gimplify_and_add (t
, &stmts
);
3236 SET_DECL_VALUE_EXPR (parm
, local
);
3237 DECL_HAS_VALUE_EXPR_P (parm
) = 1;
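              /* Hedged sketch of the net effect of the code above: it is
                 roughly "local = parm;", with every later use of PARM
                 redirected to LOCAL through DECL_VALUE_EXPR, which yields
                 the callee-copy semantics; for variable-sized parameters
                 LOCAL's storage comes from the __builtin_alloca call
                 built just above.  */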
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

rtx
promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (REG_P (DECL_INCOMING_RTL (arg))
        && REGNO (DECL_INCOMING_RTL (arg)) == regno
        && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
        int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));

        mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
        if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
            && mode != DECL_MODE (arg))
          {
            *pmode = DECL_MODE (arg);
            *punsignedp = unsignedp;
            return DECL_INCOMING_RTL (arg);
          }
      }

  return 0;
}
3279 /* Compute the size and offset from the start of the stacked arguments for a
3280 parm passed in mode PASSED_MODE and with type TYPE.
3282 INITIAL_OFFSET_PTR points to the current offset into the stacked
3285 The starting offset and size for this parm are returned in
3286 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3287 nonzero, the offset is that of stack slot, which is returned in
3288 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3289 padding required from the initial offset ptr to the stack slot.
3291 IN_REGS is nonzero if the argument will be passed in registers. It will
3292 never be set if REG_PARM_STACK_SPACE is not defined.
3294 FNDECL is the function in which the argument was defined.
3296 There are two types of rounding that are done. The first, controlled by
3297 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3298 list to be aligned to the specific boundary (in bits). This rounding
3299 affects the initial and starting offsets, but not the argument size.
3301 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3302 optionally rounds the size of the parm to PARM_BOUNDARY. The
3303 initial offset is not affected by this rounding, while the size always
3304 is and the starting offset may be. */
3306 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3307 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3308 callers pass in the total size of args so far as
3309 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
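
/* Worked example (hypothetical numbers): with FUNCTION_ARG_BOUNDARY of
   64 bits and PARM_BOUNDARY of 32 bits, an incoming offset of 4 bytes is
   first padded to 8 so the argument starts on a 64-bit boundary (this
   changes the starting offsets, not the size), while a 5-byte BLKmode
   argument has its size rounded up to the next 32-bit multiple, 8 bytes
   (this always changes LOCATE->SIZE and, when padding is downward, may
   shift the starting offset as well).  */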
3312 locate_and_pad_parm (enum machine_mode passed_mode
, tree type
, int in_regs
,
3313 int partial
, tree fndecl ATTRIBUTE_UNUSED
,
3314 struct args_size
*initial_offset_ptr
,
3315 struct locate_and_pad_arg_data
*locate
)
3318 enum direction where_pad
;
3319 unsigned int boundary
;
3320 int reg_parm_stack_space
= 0;
3321 int part_size_in_regs
;
3323 #ifdef REG_PARM_STACK_SPACE
3324 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
3326 /* If we have found a stack parm before we reach the end of the
3327 area reserved for registers, skip that area. */
3330 if (reg_parm_stack_space
> 0)
3332 if (initial_offset_ptr
->var
)
3334 initial_offset_ptr
->var
3335 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
3336 ssize_int (reg_parm_stack_space
));
3337 initial_offset_ptr
->constant
= 0;
3339 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
3340 initial_offset_ptr
->constant
= reg_parm_stack_space
;
3343 #endif /* REG_PARM_STACK_SPACE */
3345 part_size_in_regs
= (reg_parm_stack_space
== 0 ? partial
: 0);
3348 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
3349 where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
3350 boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
3351 locate
->where_pad
= where_pad
;
3352 locate
->boundary
= boundary
;
3354 /* Remember if the outgoing parameter requires extra alignment on the
3355 calling function side. */
3356 if (boundary
> PREFERRED_STACK_BOUNDARY
)
3357 boundary
= PREFERRED_STACK_BOUNDARY
;
3358 if (cfun
->stack_alignment_needed
< boundary
)
3359 cfun
->stack_alignment_needed
= boundary
;
3361 #ifdef ARGS_GROW_DOWNWARD
3362 locate
->slot_offset
.constant
= -initial_offset_ptr
->constant
;
3363 if (initial_offset_ptr
->var
)
3364 locate
->slot_offset
.var
= size_binop (MINUS_EXPR
, ssize_int (0),
3365 initial_offset_ptr
->var
);
3369 if (where_pad
!= none
3370 && (!host_integerp (sizetree
, 1)
3371 || (tree_low_cst (sizetree
, 1) * BITS_PER_UNIT
) % PARM_BOUNDARY
))
3372 s2
= round_up (s2
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3373 SUB_PARM_SIZE (locate
->slot_offset
, s2
);
3376 locate
->slot_offset
.constant
+= part_size_in_regs
;
3379 #ifdef REG_PARM_STACK_SPACE
3380 || REG_PARM_STACK_SPACE (fndecl
) > 0
3383 pad_to_arg_alignment (&locate
->slot_offset
, boundary
,
3384 &locate
->alignment_pad
);
3386 locate
->size
.constant
= (-initial_offset_ptr
->constant
3387 - locate
->slot_offset
.constant
);
3388 if (initial_offset_ptr
->var
)
3389 locate
->size
.var
= size_binop (MINUS_EXPR
,
3390 size_binop (MINUS_EXPR
,
3392 initial_offset_ptr
->var
),
3393 locate
->slot_offset
.var
);
3395 /* Pad_below needs the pre-rounded size to know how much to pad
3397 locate
->offset
= locate
->slot_offset
;
3398 if (where_pad
== downward
)
3399 pad_below (&locate
->offset
, passed_mode
, sizetree
);
3401 #else /* !ARGS_GROW_DOWNWARD */
3403 #ifdef REG_PARM_STACK_SPACE
3404 || REG_PARM_STACK_SPACE (fndecl
) > 0
3407 pad_to_arg_alignment (initial_offset_ptr
, boundary
,
3408 &locate
->alignment_pad
);
3409 locate
->slot_offset
= *initial_offset_ptr
;
3411 #ifdef PUSH_ROUNDING
3412 if (passed_mode
!= BLKmode
)
3413 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
3416 /* Pad_below needs the pre-rounded size to know how much to pad below
3417 so this must be done before rounding up. */
3418 locate
->offset
= locate
->slot_offset
;
3419 if (where_pad
== downward
)
3420 pad_below (&locate
->offset
, passed_mode
, sizetree
);
3422 if (where_pad
!= none
3423 && (!host_integerp (sizetree
, 1)
3424 || (tree_low_cst (sizetree
, 1) * BITS_PER_UNIT
) % PARM_BOUNDARY
))
3425 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3427 ADD_PARM_SIZE (locate
->size
, sizetree
);
3429 locate
->size
.constant
-= part_size_in_regs
;
3430 #endif /* ARGS_GROW_DOWNWARD */
3433 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3434 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
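
/* Worked example (hypothetical numbers): with STACK_POINTER_OFFSET == 0,
   a constant offset of 20 bytes and a 64-bit (8-byte) boundary give
   CEIL_ROUND (20, 8) == 24 in the usual upward-growing case, whereas the
   ARGS_GROW_DOWNWARD variant uses FLOOR_ROUND and yields 16.  */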
3437 pad_to_arg_alignment (struct args_size
*offset_ptr
, int boundary
,
3438 struct args_size
*alignment_pad
)
3440 tree save_var
= NULL_TREE
;
3441 HOST_WIDE_INT save_constant
= 0;
3442 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
3443 HOST_WIDE_INT sp_offset
= STACK_POINTER_OFFSET
;
3445 #ifdef SPARC_STACK_BOUNDARY_HACK
3446 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3447 the real alignment of %sp. However, when it does this, the
3448 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3449 if (SPARC_STACK_BOUNDARY_HACK
)
3453 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
3455 save_var
= offset_ptr
->var
;
3456 save_constant
= offset_ptr
->constant
;
3459 alignment_pad
->var
= NULL_TREE
;
3460 alignment_pad
->constant
= 0;
3462 if (boundary
> BITS_PER_UNIT
)
3464 if (offset_ptr
->var
)
3466 tree sp_offset_tree
= ssize_int (sp_offset
);
3467 tree offset
= size_binop (PLUS_EXPR
,
3468 ARGS_SIZE_TREE (*offset_ptr
),
3470 #ifdef ARGS_GROW_DOWNWARD
3471 tree rounded
= round_down (offset
, boundary
/ BITS_PER_UNIT
);
3473 tree rounded
= round_up (offset
, boundary
/ BITS_PER_UNIT
);
3476 offset_ptr
->var
= size_binop (MINUS_EXPR
, rounded
, sp_offset_tree
);
3477 /* ARGS_SIZE_TREE includes constant term. */
3478 offset_ptr
->constant
= 0;
3479 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
3480 alignment_pad
->var
= size_binop (MINUS_EXPR
, offset_ptr
->var
,
3485 offset_ptr
->constant
= -sp_offset
+
3486 #ifdef ARGS_GROW_DOWNWARD
3487 FLOOR_ROUND (offset_ptr
->constant
+ sp_offset
, boundary_in_bytes
);
3489 CEIL_ROUND (offset_ptr
->constant
+ sp_offset
, boundary_in_bytes
);
3491 if (boundary
> PARM_BOUNDARY
&& boundary
> STACK_BOUNDARY
)
3492 alignment_pad
->constant
= offset_ptr
->constant
- save_constant
;
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode,
           tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
/* Walk the tree of blocks describing the binding levels within a function
   and warn about variables that might be killed by setjmp or vfork.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
setjmp_vars_warning (tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
          && DECL_RTL_SET_P (decl)
          && REG_P (DECL_RTL (decl))
          && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
        warning (0, "variable %q+D might be clobbered by %<longjmp%>"
                 " or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_vars_warning (sub);
}

/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning (void)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
               decl);
}
3564 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3565 and create duplicate blocks. */
3566 /* ??? Need an option to either create block fragments or to create
3567 abstract origin duplicates of a source block. It really depends
3568 on what optimization has been performed. */
3571 reorder_blocks (void)
3573 tree block
= DECL_INITIAL (current_function_decl
);
3574 VEC(tree
,heap
) *block_stack
;
3576 if (block
== NULL_TREE
)
3579 block_stack
= VEC_alloc (tree
, heap
, 10);
3581 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3582 clear_block_marks (block
);
3584 /* Prune the old trees away, so that they don't get in the way. */
3585 BLOCK_SUBBLOCKS (block
) = NULL_TREE
;
3586 BLOCK_CHAIN (block
) = NULL_TREE
;
3588 /* Recreate the block tree from the note nesting. */
3589 reorder_blocks_1 (get_insns (), block
, &block_stack
);
3590 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
3592 VEC_free (tree
, heap
, block_stack
);
3595 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3598 clear_block_marks (tree block
)
3602 TREE_ASM_WRITTEN (block
) = 0;
3603 clear_block_marks (BLOCK_SUBBLOCKS (block
));
3604 block
= BLOCK_CHAIN (block
);
3609 reorder_blocks_1 (rtx insns
, tree current_block
, VEC(tree
,heap
) **p_block_stack
)
3613 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
3617 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
3619 tree block
= NOTE_BLOCK (insn
);
3622 origin
= (BLOCK_FRAGMENT_ORIGIN (block
)
3623 ? BLOCK_FRAGMENT_ORIGIN (block
)
3626 /* If we have seen this block before, that means it now
3627 spans multiple address regions. Create a new fragment. */
3628 if (TREE_ASM_WRITTEN (block
))
3630 tree new_block
= copy_node (block
);
3632 BLOCK_FRAGMENT_ORIGIN (new_block
) = origin
;
3633 BLOCK_FRAGMENT_CHAIN (new_block
)
3634 = BLOCK_FRAGMENT_CHAIN (origin
);
3635 BLOCK_FRAGMENT_CHAIN (origin
) = new_block
;
3637 NOTE_BLOCK (insn
) = new_block
;
3641 BLOCK_SUBBLOCKS (block
) = 0;
3642 TREE_ASM_WRITTEN (block
) = 1;
3643 /* When there's only one block for the entire function,
3644 current_block == block and we mustn't do this, it
3645 will cause infinite recursion. */
3646 if (block
!= current_block
)
3648 if (block
!= origin
)
3649 gcc_assert (BLOCK_SUPERCONTEXT (origin
) == current_block
);
3651 BLOCK_SUPERCONTEXT (block
) = current_block
;
3652 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
3653 BLOCK_SUBBLOCKS (current_block
) = block
;
3654 current_block
= origin
;
3656 VEC_safe_push (tree
, heap
, *p_block_stack
, block
);
3658 else if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
3660 NOTE_BLOCK (insn
) = VEC_pop (tree
, *p_block_stack
);
3661 BLOCK_SUBBLOCKS (current_block
)
3662 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
3663 current_block
= BLOCK_SUPERCONTEXT (current_block
);
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
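
/* Illustration (hedged): given a chain B1 -> B2 -> B3 linked through
   BLOCK_CHAIN, the loop above rewires the links in place and returns B3,
   so the chain afterwards reads B3 -> B2 -> B1, with the old head B1 now
   the tail and BLOCK_CHAIN (B1) == NULL_TREE.  */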
3685 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3686 non-NULL, list them all into VECTOR, in a depth-first preorder
3687 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3691 all_blocks (tree block
, tree
*vector
)
3697 TREE_ASM_WRITTEN (block
) = 0;
3699 /* Record this block. */
3701 vector
[n_blocks
] = block
;
3705 /* Record the subblocks, and their subblocks... */
3706 n_blocks
+= all_blocks (BLOCK_SUBBLOCKS (block
),
3707 vector
? vector
+ n_blocks
: 0);
3708 block
= BLOCK_CHAIN (block
);
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
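
/* Usage sketch (mirrors what number_blocks below does):

       int n_blocks;
       tree *vec = get_block_vector (block, &n_blocks);
       ... walk vec[0] .. vec[n_blocks - 1] ...
       free (vec);

   the vector is heap-allocated with XNEWVEC, hence the final free.  */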
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
3783 /* Allocate a function structure for FNDECL and set its contents
3787 allocate_struct_function (tree fndecl
)
3790 tree fntype
= fndecl
? TREE_TYPE (fndecl
) : NULL_TREE
;
3792 cfun
= ggc_alloc_cleared (sizeof (struct function
));
3794 cfun
->stack_alignment_needed
= STACK_BOUNDARY
;
3795 cfun
->preferred_stack_boundary
= STACK_BOUNDARY
;
3797 current_function_funcdef_no
= funcdef_no
++;
3799 cfun
->function_frequency
= FUNCTION_FREQUENCY_NORMAL
;
3801 init_eh_for_function ();
3803 lang_hooks
.function
.init (cfun
);
3804 if (init_machine_status
)
3805 cfun
->machine
= (*init_machine_status
) ();
3810 DECL_STRUCT_FUNCTION (fndecl
) = cfun
;
3811 cfun
->decl
= fndecl
;
3813 result
= DECL_RESULT (fndecl
);
3814 if (aggregate_value_p (result
, fndecl
))
3816 #ifdef PCC_STATIC_STRUCT_RETURN
3817 current_function_returns_pcc_struct
= 1;
3819 current_function_returns_struct
= 1;
3822 current_function_returns_pointer
= POINTER_TYPE_P (TREE_TYPE (result
));
3824 current_function_stdarg
3826 && TYPE_ARG_TYPES (fntype
) != 0
3827 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3828 != void_type_node
));
3830 /* Assume all registers in stdarg functions need to be saved. */
3831 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
3832 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
3835 /* Reset cfun, and other non-struct-function variables to defaults as
3836 appropriate for emitting rtl at the start of a function. */
3839 prepare_function_start (tree fndecl
)
3841 if (fndecl
&& DECL_STRUCT_FUNCTION (fndecl
))
3842 cfun
= DECL_STRUCT_FUNCTION (fndecl
);
3844 allocate_struct_function (fndecl
);
3846 init_varasm_status (cfun
);
3849 cse_not_expected
= ! optimize
;
3851 /* Caller save not needed yet. */
3852 caller_save_needed
= 0;
3854 /* We haven't done register allocation yet. */
3857 /* Indicate that we have not instantiated virtual registers yet. */
3858 virtuals_instantiated
= 0;
3860 /* Indicate that we want CONCATs now. */
3861 generating_concat_p
= 1;
3863 /* Indicate we have no need of a frame pointer yet. */
3864 frame_pointer_needed
= 0;
3867 /* Initialize the rtl expansion mechanism so that we can do simple things
3868 like generate sequences. This is used to provide a context during global
3869 initialization of some passes. */
3871 init_dummy_function_start (void)
3873 prepare_function_start (NULL
);
3876 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3877 and initialize static variables for generating RTL for the statements
3881 init_function_start (tree subr
)
3883 prepare_function_start (subr
);
3885 /* Prevent ever trying to delete the first instruction of a
3886 function. Also tell final how to output a linenum before the
3887 function prologue. Note linenums could be missing, e.g. when
3888 compiling a Java .class file. */
3889 if (! DECL_IS_BUILTIN (subr
))
3890 emit_line_note (DECL_SOURCE_LOCATION (subr
));
3892 /* Make sure first insn is a note even if we don't want linenums.
3893 This makes sure the first insn will never be deleted.
3894 Also, final expects a note to appear there. */
3895 emit_note (NOTE_INSN_DELETED
);
3897 /* Warn if this value is an aggregate type,
3898 regardless of which calling convention we are using for it. */
3899 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
3900 warning (OPT_Waggregate_return
, "function returns an aggregate");
3903 /* Make sure all values used by the optimization passes have sane
3906 init_function_for_compilation (void)
3910 /* No prologue/epilogue insns yet. Make sure that these vectors are
3912 gcc_assert (VEC_length (int, prologue
) == 0);
3913 gcc_assert (VEC_length (int, epilogue
) == 0);
3914 gcc_assert (VEC_length (int, sibcall_epilogue
) == 0);
3918 struct tree_opt_pass pass_init_function
=
3922 init_function_for_compilation
, /* execute */
3925 0, /* static_pass_number */
3927 0, /* properties_required */
3928 0, /* properties_provided */
3929 0, /* properties_destroyed */
3930 0, /* todo_flags_start */
3931 0, /* todo_flags_finish */
void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}

/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     cfun->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
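
/* Illustrative sketch (hedged): at the source level the code emitted
   above behaves roughly like

       guard_slot = <guard>;

   where GUARD_SLOT is cfun->stack_protect_guard's stack slot and <guard>
   is whatever the target's stack_protect_guard hook designates, commonly
   the global __stack_chk_guard or a TLS slot.  */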
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

static void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     cfun->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
	{
	  emit_insn (tmp);
	  break;
	}
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
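
/* Illustrative sketch (hedged): the check emitted above is roughly

       if (guard_slot != <guard>)
	 <stack_protect_fail> ();

   with the failure call supplied by the target's stack_protect_fail hook
   (typically a call to __stack_chk_fail) and predicted not taken.  */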
4037 /* Start the RTL for a new function, and set variables used for
4039 SUBR is the FUNCTION_DECL node.
4040 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4041 the function's parameters, which must be run at any return statement. */
4044 expand_function_start (tree subr)
4046 /* Make sure volatile mem refs aren't considered
4047 valid operands of arithmetic insns. */
4048 init_recog_no_volatile ();
4050 current_function_profile
4052 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4054 current_function_limit_stack
4055 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4057 /* Make the label for return statements to jump to. Do not special
4058 case machines with special return instructions -- they will be
4059 handled later during jump, ifcvt, or epilogue creation. */
4060 return_label = gen_label_rtx ();
4062 /* Initialize rtx used to return the value. */
4063 /* Do this before assign_parms so that we copy the struct value address
4064 before any library calls that assign parms might generate. */
4066 /* Decide whether to return the value in memory or in a register. */
4067 if (aggregate_value_p (DECL_RESULT (subr), subr))
4069 /* Returning something that won't go in a register. */
4070 rtx value_address = 0;
4072 #ifdef PCC_STATIC_STRUCT_RETURN
4073 if (current_function_returns_pcc_struct)
4075 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4076 value_address = assemble_static_space (size);
4081 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4082 /* Expect to be passed the address of a place to store the value.
4083 If it is passed as an argument, assign_parms will take care of
4087 value_address = gen_reg_rtx (Pmode);
4088 emit_move_insn (value_address, sv);
4093 rtx x = value_address;
4094 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4096 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4097 set_mem_attributes (x, DECL_RESULT (subr), 1);
4099 SET_DECL_RTL (DECL_RESULT (subr), x);
4102 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4103 /* If return mode is void, this decl rtl should not be used. */
4104 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4107 /* Compute the return values into a pseudo reg, which we will copy
4108 into the true return register after the cleanups are done. */
4109 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4110 if (TYPE_MODE (return_type) != BLKmode
4111 && targetm.calls.return_in_msb (return_type))
4112 /* expand_function_end will insert the appropriate padding in
4113 this case. Use the return value's natural (unpadded) mode
4114 within the function proper. */
4115 SET_DECL_RTL (DECL_RESULT (subr),
4116 gen_reg_rtx (TYPE_MODE (return_type)));
4119 /* In order to figure out what mode to use for the pseudo, we
4120 figure out what the mode of the eventual return register will
4121 actually be, and use that. */
4122 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4124 /* Structures that are returned in registers are not
4125 aggregate_value_p, so we may see a PARALLEL or a REG. */
4126 if (REG_P (hard_reg))
4127 SET_DECL_RTL (DECL_RESULT (subr),
4128 gen_reg_rtx (GET_MODE (hard_reg)));
4131 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4132 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4136 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4137 result to the real return register(s). */
4138 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4141 /* Initialize rtx for parameters and local variables.
4142 In some cases this requires emitting insns. */
4143 assign_parms (subr);
4145 /* If function gets a static chain arg, store it. */
4146 if (cfun->static_chain_decl)
4148 tree parm = cfun->static_chain_decl;
4149 rtx local = gen_reg_rtx (Pmode);
4151 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4152 SET_DECL_RTL (parm, local);
4153 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4155 emit_move_insn (local, static_chain_incoming_rtx);
4158 /* If the function receives a non-local goto, then store the
4159 bits we need to restore the frame pointer. */
4160 if (cfun->nonlocal_goto_save_area)
4165 /* ??? We need to do this save early. Unfortunately here is
4166 before the frame variable gets declared. Help out... */
4167 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4169 t_save = build4 (ARRAY_REF, ptr_type_node,
4170 cfun->nonlocal_goto_save_area,
4171 integer_zero_node, NULL_TREE, NULL_TREE);
4172 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4173 r_save = convert_memory_address (Pmode, r_save);
4175 emit_move_insn (r_save, virtual_stack_vars_rtx);
4176 update_nonlocal_goto_save_area ();
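/* Descriptive note (not in the original source): element 0 of the
   nonlocal_goto_save_area now holds the virtual frame pointer saved just
   above; update_nonlocal_goto_save_area is expected to fill the remaining
   slot with the saved stack pointer, so that a nonlocal goto can restore
   both when it unwinds into this frame.  */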
4179 /* The following was moved from init_function_start.
4180 The move is supposed to make sdb output more accurate. */
4181 /* Indicate the beginning of the function body,
4182 as opposed to parm setup. */
4183 emit_note (NOTE_INSN_FUNCTION_BEG);
4185 gcc_assert (NOTE_P (get_last_insn ()));
4187 parm_birth_insn = get_last_insn ();
4189 if (current_function_profile)
4192 PROFILE_HOOK (current_function_funcdef_no);
4196 /* After the display initializations is where the stack checking
4198 if (flag_stack_check)
4199 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4201 /* Make sure there is a line number after the function entry setup code. */
4202 force_next_line_note ();
4205 /* Undo the effects of init_dummy_function_start. */
4207 expand_dummy_function_end (void)
4209 /* End any sequences that failed to be closed due to syntax errors. */
4210 while (in_sequence_p ())
4213 /* Outside function body, can't compute type's actual size
4214 until next function's body starts. */
4216 free_after_parsing (cfun);
4217 free_after_compilation (cfun);
4221 /* Call DOIT for each hard register used as a return value from
4222 the current function. */
4225 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4227 rtx outgoing = current_function_return_rtx;
4232 if (REG_P (outgoing))
4233 (*doit) (outgoing, arg);
4234 else if (GET_CODE (outgoing) == PARALLEL)
4238 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4240 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4242 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4249 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4251 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4255 clobber_return_register (void)
4257 diddle_return_value (do_clobber_return_reg, NULL);
4259 /* In case we do use pseudo to return value, clobber it too. */
4260 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4262 tree decl_result = DECL_RESULT (current_function_decl);
4263 rtx decl_rtl = DECL_RTL (decl_result);
4264 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4266 do_clobber_return_reg (decl_rtl, NULL);
4272 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4274 emit_insn (gen_rtx_USE (VOIDmode, reg));
4278 use_return_register (void)
4280 diddle_return_value (do_use_return_reg, NULL);
4283 /* Possibly warn about unused parameters. */
4285 do_warn_unused_parameter (tree fn)
4289 for (decl = DECL_ARGUMENTS (fn);
4290 decl; decl = TREE_CHAIN (decl))
4291 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4292 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4293 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4296 static GTY(()) rtx initial_trampoline;
4298 /* Generate RTL for the end of the current function. */
4301 expand_function_end (void)
4305 /* If arg_pointer_save_area was referenced only from a nested
4306 function, we will not have initialized it yet. Do that now. */
4307 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4308 get_arg_pointer_save_area (cfun);
4310 /* If we are doing stack checking and this function makes calls,
4311 do a stack probe at the start of the function to ensure we have enough
4312 space for another stack frame. */
4313 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4317 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4321 probe_stack_range (STACK_CHECK_PROTECT,
4322 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4325 emit_insn_before (seq, stack_check_probe_note);
4330 /* Possibly warn about unused parameters.
4331 When the front end does unit-at-a-time, the warning is already
4332 issued at finalization time. */
4333 if (warn_unused_parameter
4334 && !lang_hooks.callgraph.expand_function)
4335 do_warn_unused_parameter (current_function_decl);
4337 /* End any sequences that failed to be closed due to syntax errors. */
4338 while (in_sequence_p ())
4341 clear_pending_stack_adjust ();
4342 do_pending_stack_adjust ();
4344 /* Mark the end of the function body.
4345 If control reaches this insn, the function can drop through
4346 without returning a value. */
4347 emit_note (NOTE_INSN_FUNCTION_END);
4349 /* Must mark the last line number note in the function, so that the test
4350 coverage code can avoid counting the last line twice. This just tells
4351 the code to ignore the immediately following line note, since there
4352 already exists a copy of this note somewhere above. This line number
4353 note is still needed for debugging though, so we can't delete it. */
4354 if (flag_test_coverage)
4355 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4357 /* Output a linenumber for the end of the function.
4358 SDB depends on this. */
4359 force_next_line_note ();
4360 emit_line_note (input_location);
4362 /* Before the return label (if any), clobber the return
4363 registers so that they are not propagated live to the rest of
4364 the function. This can only happen with functions that drop
4365 through; if there had been a return statement, there would
4366 have either been a return rtx, or a jump to the return label.
4368 We delay actual code generation after the current_function_value_rtx
4370 clobber_after = get_last_insn ();
4372 /* Output the label for the actual return from the function. */
4373 emit_label (return_label);
4375 if (USING_SJLJ_EXCEPTIONS)
4377 /* Let except.c know where it should emit the call to unregister
4378 the function context for sjlj exceptions. */
4379 if (flag_exceptions)
4380 sjlj_emit_function_exit_after (get_last_insn ());
4384 /* @@@ This is a kludge. We want to ensure that instructions that
4385 may trap are not moved into the epilogue by scheduling, because
4386 we don't always emit unwind information for the epilogue.
4387 However, not all machine descriptions define a blockage insn, so
4388 emit an ASM_INPUT to act as one. */
4389 if (flag_non_call_exceptions)
4390 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4393 /* If this is an implementation of throw, do what's necessary to
4394 communicate between __builtin_eh_return and the epilogue. */
4395 expand_eh_return ();
4397 /* If scalar return value was computed in a pseudo-reg, or was a named
4398 return value that got dumped to the stack, copy that to the hard
4400 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4402 tree decl_result = DECL_RESULT (current_function_decl);
4403 rtx decl_rtl = DECL_RTL (decl_result);
4405 if (REG_P (decl_rtl)
4406 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4407 : DECL_REGISTER (decl_result))
4409 rtx real_decl_rtl = current_function_return_rtx;
4411 /* This should be set in assign_parms. */
4412 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4414 /* If this is a BLKmode structure being returned in registers,
4415 then use the mode computed in expand_return. Note that if
4416 decl_rtl is memory, then its mode may have been changed,
4417 but that current_function_return_rtx has not. */
4418 if (GET_MODE (real_decl_rtl) == BLKmode)
4419 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4421 /* If a non-BLKmode return value should be padded at the least
4422 significant end of the register, shift it left by the appropriate
4423 amount. BLKmode results are handled using the group load/store
4425 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4426 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4428 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4429 REGNO (real_decl_rtl)),
4431 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4433 /* If a named return value dumped decl_return to memory, then
4434 we may need to re-do the PROMOTE_MODE signed/unsigned
4436 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4438 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4440 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4441 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4444 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4446 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4448 /* If expand_function_start has created a PARALLEL for decl_rtl,
4449 move the result to the real return registers. Otherwise, do
4450 a group load from decl_rtl for a named return. */
4451 if (GET_CODE (decl_rtl) == PARALLEL)
4452 emit_group_move (real_decl_rtl, decl_rtl);
4454 emit_group_load (real_decl_rtl, decl_rtl,
4455 TREE_TYPE (decl_result),
4456 int_size_in_bytes (TREE_TYPE (decl_result)));
4458 /* In the case of complex integer modes smaller than a word, we'll
4459 need to generate some non-trivial bitfield insertions. Do that
4460 on a pseudo and not the hard register. */
4461 else if (GET_CODE (decl_rtl) == CONCAT
4462 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4463 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4465 int old_generating_concat_p;
4468 old_generating_concat_p = generating_concat_p;
4469 generating_concat_p = 0;
4470 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4471 generating_concat_p = old_generating_concat_p;
4473 emit_move_insn (tmp, decl_rtl);
4474 emit_move_insn (real_decl_rtl, tmp);
4477 emit_move_insn (real_decl_rtl, decl_rtl);
4481 /* If returning a structure, arrange to return the address of the value
4482 in a place where debuggers expect to find it.
4484 If returning a structure PCC style,
4485 the caller also depends on this value.
4486 And current_function_returns_pcc_struct is not necessarily set. */
4487 if (current_function_returns_struct
4488 || current_function_returns_pcc_struct)
4490 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4491 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4494 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4495 type = TREE_TYPE (type);
4497 value_address = XEXP (value_address, 0);
4499 outgoing = targetm.calls.function_value (build_pointer_type (type),
4500 current_function_decl, true);
4502 /* Mark this as a function return value so integrate will delete the
4503 assignment and USE below when inlining this function. */
4504 REG_FUNCTION_VALUE_P (outgoing) = 1;
4506 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4507 value_address = convert_memory_address (GET_MODE (outgoing),
4510 emit_move_insn (outgoing, value_address);
4512 /* Show return register used to hold result (in this case the address
4514 current_function_return_rtx = outgoing;
4517 /* Emit the actual code to clobber return register. */
4522 clobber_return_register ();
4523 expand_naked_return ();
4527 emit_insn_after (seq, clobber_after);
4530 /* Output the label for the naked return from the function. */
4531 emit_label (naked_return_label);
4533 /* If stack protection is enabled for this function, check the guard. */
4534 if (cfun->stack_protect_guard)
4535 stack_protect_epilogue ();
4537 /* If we had calls to alloca, and this machine needs
4538 an accurate stack pointer to exit the function,
4539 insert some code to save and restore the stack pointer. */
4540 if (! EXIT_IGNORE_STACK
4541 && current_function_calls_alloca)
4545 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4546 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4549 /* ??? This should no longer be necessary since stupid is no longer with
4550 us, but there are some parts of the compiler (eg reload_combine, and
4551 sh mach_dep_reorg) that still try and compute their own lifetime info
4552 instead of using the general framework. */
4553 use_return_register ();
4557 get_arg_pointer_save_area (struct function *f)
4559 rtx ret = f->x_arg_pointer_save_area;
4563 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4564 f->x_arg_pointer_save_area = ret;
4567 if (f == cfun && ! f->arg_pointer_save_area_init)
4571 /* Save the arg pointer at the beginning of the function. The
4572 generated stack slot may not be a valid memory address, so we
4573 have to check it and fix it if necessary. */
4575 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4579 push_topmost_sequence ();
4580 emit_insn_after (seq, entry_of_function ());
4581 pop_topmost_sequence ();
4587 /* Extend a vector that records the INSN_UIDs of INSNS
4588 (a list of one or more insns). */
4591 record_insns (rtx insns, VEC(int,heap) **vecp)
4595 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4596 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4599 /* Set the locator of the insn chain starting at INSN to LOC. */
4601 set_insn_locators (rtx insn, int loc)
4603 while (insn != NULL_RTX)
4606 INSN_LOCATOR (insn) = loc;
4607 insn = NEXT_INSN (insn);
4611 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4612 be running after reorg, SEQUENCE rtl is possible. */
4615 contains (rtx insn, VEC(int,heap) **vec)
4619 if (NONJUMP_INSN_P (insn)
4620 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4623 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4624 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4625 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4626 == VEC_index (int, *vec, j))
4632 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4633 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4640 prologue_epilogue_contains (rtx insn)
4642 if (contains (insn, &prologue))
4644 if (contains (insn, &epilogue))
4650 sibcall_epilogue_contains (rtx insn)
4652 if (sibcall_epilogue)
4653 return contains (insn, &sibcall_epilogue);
4658 /* Insert gen_return at the end of block BB. This also means updating
4659 block_for_insn appropriately. */
4662 emit_return_into_block (basic_block bb, rtx line_note)
4664 emit_jump_insn_after (gen_return (), BB_END (bb));
4666 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4668 #endif /* HAVE_return */
4670 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4672 /* These functions convert the epilogue into a variant that does not
4673 modify the stack pointer. This is used in cases where a function
4674 returns an object whose size is not known until it is computed.
4675 The called function leaves the object on the stack, leaves the
4676 stack depressed, and returns a pointer to the object.
4678 What we need to do is track all modifications and references to the
4679 stack pointer, deleting the modifications and changing the
4680 references to point to the location the stack pointer would have
4681 pointed to had the modifications taken place.
4683 These functions need to be portable so we need to make as few
4684 assumptions about the epilogue as we can. However, the epilogue
4685 basically contains three things: instructions to reset the stack
4686 pointer, instructions to reload registers, possibly including the
4687 frame pointer, and an instruction to return to the caller.
4689 We must be sure of what a relevant epilogue insn is doing. We also
4690 make no attempt to validate the insns we make since if they are
4691 invalid, we probably can't do anything valid. The intent is that
4692 these routines get "smarter" as more and more machines start to use
4693 them and they try operating on different epilogues.
4695 We use the following structure to track what the part of the
4696 epilogue that we've already processed has done. We keep two copies
4697 of the SP equivalence, one for use during the insn we are
4698 processing and one for use in the next insn. The difference is
4699 because one part of a PARALLEL may adjust SP and the other may use
4704 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4705 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4706 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4707 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4708 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4709 should be set to once we no longer need
4711 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4715 static void handle_epilogue_set (rtx, struct epi_info *);
4716 static void update_epilogue_consts (rtx, rtx, void *);
4717 static void emit_equiv_load (struct epi_info *);
4719 /* Modify INSN, a list of one or more insns that is part of the epilogue, to
4720 make no modifications to the stack pointer. Return the new list of insns. */
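/* Illustrative example (not from any particular target): given an epilogue
   fragment such as

       (set (reg sp) (plus (reg sp) (const_int 16)))
       (set (reg r0) (mem (plus (reg sp) (const_int 4))))

   the first SET is not re-emitted; instead an sp_offset of 16 is recorded,
   and the reference in the second insn is rewritten to
   (mem (plus (reg sp) (const_int 20))), folding the deleted adjustment
   into the address, as handle_epilogue_set below describes.  */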
4723 keep_stack_depressed (rtx insns)
4726 struct epi_info info;
4729 /* If the epilogue is just a single instruction, it must be OK as is. */
4730 if (NEXT_INSN (insns) == NULL_RTX)
4733 /* Otherwise, start a sequence, initialize the information we have, and
4734 process all the insns we were given. */
4737 info.sp_equiv_reg = stack_pointer_rtx;
4739 info.equiv_reg_src = 0;
4741 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4742 info.const_equiv[j] = 0;
4746 while (insn != NULL_RTX)
4748 next = NEXT_INSN (insn);
4757 /* If this insn references the register that SP is equivalent to and
4758 we have a pending load to that register, we must force out the load
4759 first and then indicate we no longer know what SP's equivalent is. */
4760 if (info.equiv_reg_src != 0
4761 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4763 emit_equiv_load (&info);
4764 info.sp_equiv_reg = 0;
4767 info.new_sp_equiv_reg = info.sp_equiv_reg;
4768 info.new_sp_offset = info.sp_offset;
4770 /* If this is a (RETURN) and the return address is on the stack,
4771 update the address and change to an indirect jump. */
4772 if (GET_CODE (PATTERN (insn)) == RETURN
4773 || (GET_CODE (PATTERN (insn)) == PARALLEL
4774 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4776 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4778 HOST_WIDE_INT offset = 0;
4779 rtx jump_insn, jump_set;
4781 /* If the return address is in a register, we can emit the insn
4782 unchanged. Otherwise, it must be a MEM and we see what the
4783 base register and offset are. In any case, we have to emit any
4784 pending load to the equivalent reg of SP, if any. */
4785 if (REG_P (retaddr))
4787 emit_equiv_load (&info);
4795 gcc_assert (MEM_P (retaddr));
4797 ret_ptr = XEXP (retaddr, 0);
4799 if (REG_P (ret_ptr))
4801 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4806 gcc_assert (GET_CODE (ret_ptr) == PLUS
4807 && REG_P (XEXP (ret_ptr, 0))
4808 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4809 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4810 offset = INTVAL (XEXP (ret_ptr, 1));
4814 /* If the base of the location containing the return pointer
4815 is SP, we must update it with the replacement address. Otherwise,
4816 just build the necessary MEM. */
4817 retaddr = plus_constant (base, offset);
4818 if (base == stack_pointer_rtx)
4819 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4820 plus_constant (info.sp_equiv_reg,
4823 retaddr = gen_rtx_MEM (Pmode, retaddr);
4824 MEM_NOTRAP_P (retaddr) = 1;
4826 /* If there is a pending load to the equivalent register for SP
4827 and we reference that register, we must load our address into
4828 a scratch register and then do that load. */
4829 if (info.equiv_reg_src
4830 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4835 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4836 if (HARD_REGNO_MODE_OK (regno, Pmode)
4837 && !fixed_regs[regno]
4838 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4840 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4841 && !refers_to_regno_p (regno,
4842 regno + hard_regno_nregs[regno]
4844 info.equiv_reg_src, NULL)
4845 && info.const_equiv[regno] == 0)
4848 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4850 reg = gen_rtx_REG (Pmode, regno);
4851 emit_move_insn (reg, retaddr);
4855 emit_equiv_load (&info);
4856 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4858 /* Show the SET in the above insn is a RETURN. */
4859 jump_set = single_set (jump_insn);
4860 gcc_assert (jump_set);
4861 SET_IS_RETURN_P (jump_set) = 1;
4864 /* If SP is not mentioned in the pattern and its equivalent register, if
4865 any, is not modified, just emit it. Otherwise, if neither is set,
4866 replace the reference to SP and emit the insn. If none of those are
4867 true, handle each SET individually. */
4868 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4869 && (info.sp_equiv_reg == stack_pointer_rtx
4870 || !reg_set_p (info.sp_equiv_reg, insn)))
4872 else if (! reg_set_p (stack_pointer_rtx, insn)
4873 && (info.sp_equiv_reg == stack_pointer_rtx
4874 || !reg_set_p (info.sp_equiv_reg, insn)))
4878 changed = validate_replace_rtx (stack_pointer_rtx,
4879 plus_constant (info.sp_equiv_reg,
4882 gcc_assert (changed);
4886 else if (GET_CODE (PATTERN (insn)) == SET)
4887 handle_epilogue_set (PATTERN (insn), &info);
4888 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4890 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4891 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4892 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4897 info.sp_equiv_reg = info.new_sp_equiv_reg;
4898 info.sp_offset = info.new_sp_offset;
4900 /* Now update any constants this insn sets. */
4901 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4905 insns = get_insns ();
4910 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4911 structure that contains information about what we've seen so far. We
4912 process this SET by either updating that data or by emitting one or
4916 handle_epilogue_set (rtx set, struct epi_info *p)
4918 /* First handle the case where we are setting SP. Record what it is being
4919 set from, which we must be able to determine */
4920 if (reg_set_p (stack_pointer_rtx, set))
4922 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4924 if (GET_CODE (SET_SRC (set)) == PLUS)
4926 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4927 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4928 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4931 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4932 && (REGNO (XEXP (SET_SRC (set), 1))
4933 < FIRST_PSEUDO_REGISTER)
4934 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4936 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4940 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4942 /* If we are adjusting SP, we adjust from the old data. */
4943 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4945 p->new_sp_equiv_reg = p->sp_equiv_reg;
4946 p->new_sp_offset += p->sp_offset;
4949 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4954 /* Next handle the case where we are setting SP's equivalent
4955 register. We must not already have a value to set it to. We
4956 could update, but there seems little point in handling that case.
4957 Note that we have to allow for the case where we are setting the
4958 register set in the previous part of a PARALLEL inside a single
4959 insn. But use the old offset for any updates within this insn.
4960 We must allow for the case where the register is being set in a
4961 different (usually wider) mode than Pmode. */
4962 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4964 gcc_assert (!p->equiv_reg_src
4965 && REG_P (p->new_sp_equiv_reg)
4966 && REG_P (SET_DEST (set))
4967 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4969 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4971 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4972 plus_constant (p->sp_equiv_reg,
4976 /* Otherwise, replace any references to SP in the insn to its new value
4977 and emit the insn. */
4980 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4981 plus_constant (p->sp_equiv_reg,
4983 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4984 plus_constant (p->sp_equiv_reg,
4990 /* Update the tracking information for registers set to constants. */
4993 update_epilogue_consts (rtx dest, rtx x, void *data)
4995 struct epi_info *p = (struct epi_info *) data;
4998 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5001 /* If we are either clobbering a register or doing a partial set,
5002 show we don't know the value. */
5003 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5004 p->const_equiv[REGNO (dest)] = 0;
5006 /* If we are setting it to a constant, record that constant. */
5007 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5008 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5010 /* If this is a binary operation between a register we have been tracking
5011 and a constant, see if we can compute a new constant value. */
5012 else if (ARITHMETIC_P (SET_SRC (x))
5013 && REG_P (XEXP (SET_SRC (x), 0))
5014 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5015 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5016 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5017 && 0 != (new = simplify_binary_operation
5018 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5019 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5020 XEXP (SET_SRC (x), 1)))
5021 && GET_CODE (new) == CONST_INT)
5022 p->const_equiv[REGNO (dest)] = new;
5024 /* Otherwise, we can't do anything with this value. */
5026 p->const_equiv[REGNO (dest)] = 0;
5029 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5032 emit_equiv_load (struct epi_info *p)
5034 if (p->equiv_reg_src != 0)
5036 rtx dest = p->sp_equiv_reg;
5038 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5039 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5040 REGNO (p->sp_equiv_reg));
5042 emit_move_insn (dest, p->equiv_reg_src);
5043 p->equiv_reg_src = 0;
5048 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5049 this into place with notes indicating where the prologue ends and where
5050 the epilogue begins. Update the basic block information when possible. */
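/* In outline (descriptive note, not part of the original code): the code
   below emits the target prologue on the single successor edge of the
   entry block, optionally converts jumps to the exit fallthru block into
   (conditional) return instructions when HAVE_return is available, emits
   the epilogue on the fallthru edge into the exit block (massaging it with
   keep_stack_depressed when the function returns with the stack
   depressed), and finally inserts sibcall epilogues before sibling call
   sites.  */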
5053 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5057 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5060 #ifdef HAVE_prologue
5061 rtx prologue_end = NULL_RTX;
5063 #if defined (HAVE_epilogue) || defined(HAVE_return)
5064 rtx epilogue_end = NULL_RTX;
5068 #ifdef HAVE_prologue
5072 seq = gen_prologue ();
5075 /* Retain a map of the prologue insns. */
5076 record_insns (seq, &prologue);
5077 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5079 #ifndef PROFILE_BEFORE_PROLOGUE
5080 /* Ensure that instructions are not moved into the prologue when
5081 profiling is on. The call to the profiling routine can be
5082 emitted within the live range of a call-clobbered register. */
5083 if (current_function_profile)
5084 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
5089 set_insn_locators (seq, prologue_locator);
5091 /* Can't deal with multiple successors of the entry block
5092 at the moment. Function should always have at least one
5094 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5096 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5101 /* If the exit block has no non-fake predecessors, we don't need
5103 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5104 if ((e->flags & EDGE_FAKE) == 0)
5110 if (optimize && HAVE_return)
5112 /* If we're allowed to generate a simple return instruction,
5113 then by definition we don't need a full epilogue. Examine
5114 the block that falls through to EXIT. If it does not
5115 contain any code, examine its predecessors and try to
5116 emit (conditional) return instructions. */
5121 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5122 if (e->flags & EDGE_FALLTHRU)
5128 /* Verify that there are no active instructions in the last block. */
5129 label = BB_END (last);
5130 while (label && !LABEL_P (label))
5132 if (active_insn_p (label))
5134 label = PREV_INSN (label);
5137 if (BB_HEAD (last) == label && LABEL_P (label))
5140 rtx epilogue_line_note = NULL_RTX;
5142 /* Locate the line number associated with the closing brace,
5143 if we can find one. */
5144 for (seq = get_last_insn ();
5145 seq && ! active_insn_p (seq);
5146 seq = PREV_INSN (seq))
5147 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5149 epilogue_line_note = seq;
5153 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5155 basic_block bb = e->src;
5158 if (bb == ENTRY_BLOCK_PTR)
5165 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5171 /* If we have an unconditional jump, we can replace that
5172 with a simple return instruction. */
5173 if (simplejump_p (jump))
5175 emit_return_into_block (bb, epilogue_line_note);
5179 /* If we have a conditional jump, we can try to replace
5180 that with a conditional return instruction. */
5181 else if (condjump_p (jump))
5183 if (! redirect_jump (jump, 0, 0))
5189 /* If this block has only one successor, it both jumps
5190 and falls through to the fallthru block, so we can't
5192 if (single_succ_p (bb))
5204 /* Fix up the CFG for the successful change we just made. */
5205 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5208 /* Emit a return insn for the exit fallthru block. Whether
5209 this is still reachable will be determined later. */
5211 emit_barrier_after (BB_END (last));
5212 emit_return_into_block (last, epilogue_line_note);
5213 epilogue_end = BB_END (last);
5214 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5219 /* Find the edge that falls through to EXIT. Other edges may exist
5220 due to RETURN instructions, but those don't need epilogues.
5221 There really shouldn't be a mixture -- either all should have
5222 been converted or none, however... */
5224 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5225 if (e->flags & EDGE_FALLTHRU)
5230 #ifdef HAVE_epilogue
5234 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5236 seq = gen_epilogue ();
5238 #ifdef INCOMING_RETURN_ADDR_RTX
5239 /* If this function returns with the stack depressed and we can support
5240 it, massage the epilogue to actually do that. */
5241 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5242 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5243 seq = keep_stack_depressed (seq);
5246 emit_jump_insn (seq);
5248 /* Retain a map of the epilogue insns. */
5249 record_insns (seq, &epilogue);
5250 set_insn_locators (seq, epilogue_locator);
5255 insert_insn_on_edge (seq, e);
5263 if (! next_active_insn (BB_END (e->src)))
5265 /* We have a fall-through edge to the exit block, the source is not
5266 at the end of the function, and there will be an assembler epilogue
5267 at the end of the function.
5268 We can't use force_nonfallthru here, because that would try to
5269 use return. Inserting a jump 'by hand' is extremely messy, so
5270 we take advantage of cfg_layout_finalize using
5271 fixup_fallthru_exit_predecessor. */
5272 cfg_layout_initialize (0);
5273 FOR_EACH_BB (cur_bb)
5274 if (cur_bb->index >= NUM_FIXED_BLOCKS
5275 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5276 cur_bb->aux = cur_bb->next_bb;
5277 cfg_layout_finalize ();
5282 commit_edge_insertions ();
5284 #ifdef HAVE_sibcall_epilogue
5285 /* Emit sibling epilogues before any sibling call sites. */
5286 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5288 basic_block bb = e->src;
5289 rtx insn = BB_END (bb);
5292 || ! SIBLING_CALL_P (insn))
5299 emit_insn (gen_sibcall_epilogue ());
5303 /* Retain a map of the epilogue insns. Used in life analysis to
5304 avoid getting rid of sibcall epilogue insns. Do this before we
5305 actually emit the sequence. */
5306 record_insns (seq, &sibcall_epilogue);
5307 set_insn_locators (seq, epilogue_locator);
5309 emit_insn_before (seq, insn);
5314 #ifdef HAVE_prologue
5315 /* This is probably all useless now that we use locators. */
5320 /* GDB handles `break f' by setting a breakpoint on the first
5321 line note after the prologue. Which means (1) that if
5322 there are line number notes before where we inserted the
5323 prologue we should move them, and (2) we should generate a
5324 note before the end of the first basic block, if there isn't
5327 ??? This behavior is completely broken when dealing with
5328 multiple entry functions. We simply place the note always
5329 into first basic block and let alternate entry points
5333 for (insn = prologue_end; insn; insn = prev)
5335 prev = PREV_INSN (insn);
5336 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5338 /* Note that we cannot reorder the first insn in the
5339 chain, since rest_of_compilation relies on that
5340 remaining constant. */
5343 reorder_insns (insn, insn, prologue_end);
5347 /* Find the last line number note in the first block. */
5348 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5349 insn != prologue_end && insn;
5350 insn = PREV_INSN (insn))
5351 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5354 /* If we didn't find one, make a copy of the first line number
5358 for (insn = next_active_insn (prologue_end);
5360 insn = PREV_INSN (insn))
5361 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5363 emit_note_copy_after (insn, prologue_end);
5369 #ifdef HAVE_epilogue
5374 /* Similarly, move any line notes that appear after the epilogue.
5375 There is no need, however, to be quite so anal about the existence
5376 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5377 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5379 for (insn = epilogue_end; insn; insn = next)
5381 next = NEXT_INSN (insn);
5383 && (NOTE_LINE_NUMBER (insn) > 0
5384 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5385 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5386 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5392 /* Reposition the prologue-end and epilogue-begin notes after instruction
5393 scheduling and delayed branch scheduling. */
5396 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5398 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5399 rtx insn, last, note;
5402 if ((len = VEC_length (int, prologue)) > 0)
5406 /* Scan from the beginning until we reach the last prologue insn.
5407 We apparently can't depend on basic_block_{head,end} after
5409 for (insn = f; insn; insn = NEXT_INSN (insn))
5413 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5416 else if (contains (insn, &prologue))
5426 /* Find the prologue-end note if we haven't already, and
5427 move it to just after the last prologue insn. */
5430 for (note = last; (note = NEXT_INSN (note));)
5432 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5436 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5438 last = NEXT_INSN (last);
5439 reorder_insns (note, note, last);
5443 if ((len = VEC_length (int, epilogue)) > 0)
5447 /* Scan from the end until we reach the first epilogue insn.
5448 We apparently can't depend on basic_block_{head,end} after
5450 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5454 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5457 else if (contains (insn, &epilogue))
5467 /* Find the epilogue-begin note if we haven't already, and
5468 move it to just before the first epilogue insn. */
5471 for (note = insn; (note = PREV_INSN (note));)
5473 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5477 if (PREV_INSN (last) != note)
5478 reorder_insns (note, note, PREV_INSN (last));
5481 #endif /* HAVE_prologue or HAVE_epilogue */
5484 /* Resets insn_block_boundaries array. */
5487 reset_block_changes (void)
5489 cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
5490 VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
5493 /* Record the boundary for BLOCK. */
5495 record_block_change (tree block)
5503 if (!cfun->ib_boundaries_block)
5506 last_block = VEC_pop (tree, cfun->ib_boundaries_block);
5508 for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
5509 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
5511 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
5514 /* Finishes record of boundaries. */
5516 finalize_block_changes (void)
5518 record_block_change (DECL_INITIAL (current_function_decl));
5521 /* For INSN return the BLOCK it belongs to. */
5523 check_block_change (rtx insn, tree *block)
5525 unsigned uid = INSN_UID (insn);
5527 if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
5530 *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
5533 /* Releases the ib_boundaries_block records. */
5535 free_block_changes (void)
5537 VEC_free (tree, gc, cfun->ib_boundaries_block);
5540 /* Returns the name of the current function. */
5542 current_function_name (void)
5544 return lang_hooks.decl_printable_name (cfun->decl, 2);
5549 rest_of_handle_check_leaf_regs (void)
5551 #ifdef LEAF_REGISTERS
5552 current_function_uses_only_leaf_regs
5553 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5558 /* Insert a TYPE into the used types hash table of CFUN. */
5560 used_types_insert_helper (tree type, struct function *func)
5562 if (type != NULL && func != NULL)
5566 if (func->used_types_hash == NULL)
5567 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5568 htab_eq_pointer, NULL);
5569 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5575 /* Given a type, insert it into the used hash table in cfun. */
5577 used_types_insert (tree t)
5579 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5581 t = TYPE_MAIN_VARIANT (t);
5582 if (debug_info_level > DINFO_LEVEL_NONE)
5583 used_types_insert_helper (t, cfun);
5586 struct tree_opt_pass pass_leaf_regs =
5590 rest_of_handle_check_leaf_regs, /* execute */
5593 0, /* static_pass_number */
5595 0, /* properties_required */
5596 0, /* properties_provided */
5597 0, /* properties_destroyed */
5598 0, /* todo_flags_start */
5599 0, /* todo_flags_finish */
5604 #include "gt-function.h"