1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
38 #include "coretypes.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
53 #include "basic-block.h"
58 #include "integrate.h"
59 #include "langhooks.h"
61 #include "cfglayout.h"
63 #include "tree-pass.h"
69 /* So we can assign to cfun in this file. */
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
78 /* Some systems use __main in a way incompatible with its use in gcc, in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
83 #define NAME__MAIN "__main"
86 /* Round a value down to the largest multiple of the required alignment
87 that does not exceed it. Avoid using division in case the value is
88 negative. Assume the alignment is a power of two. */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
91 /* Similar, but round up to the next integer that meets the
92 alignment. */
93 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
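/* Illustrative arithmetic (not from the original sources), assuming a
   power-of-two alignment of 16:
       FLOOR_ROUND (37, 16)  == 32     37 & ~15
       FLOOR_ROUND (-37, 16) == -48    the bitwise AND rounds toward -infinity
       CEIL_ROUND (37, 16)   == 48     (37 + 15) & ~15
   The macro definitions above are authoritative; these values only show why
   division is avoided for negative inputs. */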
95 /* Nonzero if function being compiled doesn't contain any calls
96 (ignoring the prologue and epilogue). This is set prior to
97 local register allocation and is valid for the remaining
98 compiler passes. */
99 int current_function_is_leaf;
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102 (ignoring the prologue and epilogue). This is only valid after
103 pass_stack_ptr_mod has run. */
104 int current_function_sp_is_unchanging;
106 /* Nonzero if the function being compiled is a leaf function which only
107 uses leaf registers. This is valid after reload (specifically after
108 sched2) and is useful only if the port defines LEAF_REGISTERS. */
109 int current_function_uses_only_leaf_regs;
111 /* Nonzero once virtual register instantiation has been done.
112 assign_stack_local uses frame_pointer_rtx when this is nonzero.
113 calls.c:emit_library_call_value_1 uses it to set up
114 post-instantiation libcalls. */
115 int virtuals_instantiated;
117 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
118 static GTY(()) int funcdef_no;
120 /* These variables hold pointers to functions to create and destroy
121 target specific, per-function data structures. */
122 struct machine_function * (*init_machine_status) (void);
124 /* The currently compiled function. */
125 struct function *cfun = 0;
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
131 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
132 in this function. */
133 static VEC(int,heap) *sibcall_epilogue;
135 /* In order to evaluate some expressions, such as function calls returning
136 structures in memory, we need to temporarily allocate stack locations.
137 We record each allocated temporary in the following structure.
139 Associated with each temporary slot is a nesting level. When we pop up
140 one level, all temporaries associated with the previous level are freed.
141 Normally, all temporaries are freed after the execution of the statement
142 in which they were created. However, if we are inside a ({...}) grouping,
143 the result may be in a temporary and hence must be preserved. If the
144 result could be in a temporary, we preserve it if we can determine which
145 one it is in. If we cannot determine which temporary may contain the
146 result, all temporaries are preserved. A temporary is preserved by
147 pretending it was allocated at the previous nesting level.
149 Automatic variables are also assigned temporary slots, at the nesting
150 level where they are defined. They are marked as "kept" so that
151 free_temp_slots will not free them. */
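/* Illustrative sketch (not part of the original sources) of how expansion
   code elsewhere typically drives the nesting machinery defined below:

       push_temp_slots ();                        -- enter a new level
       slot = assign_stack_temp (DImode, 8, 0);   -- temporary at this level
       ... expand the statement using SLOT ...
       preserve_temp_slots (result);              -- keep slots feeding RESULT
       free_temp_slots ();                        -- release the others
       pop_temp_slots ();                         -- leave the level

   The functions named here are defined later in this file; the exact call
   sequence varies by caller. */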
153 struct temp_slot GTY(())
154 {
155 /* Points to next temporary slot. */
156 struct temp_slot *next;
157 /* Points to previous temporary slot. */
158 struct temp_slot *prev;
160 /* The rtx used to reference the slot. */
161 rtx slot;
162 /* The rtx used to represent the address if not the address of the
163 slot above. May be an EXPR_LIST if multiple addresses exist. */
164 rtx address;
165 /* The alignment (in bits) of the slot. */
166 unsigned int align;
167 /* The size, in units, of the slot. */
168 HOST_WIDE_INT size;
169 /* The type of the object in the slot, or zero if it doesn't correspond
170 to a type. We use this to determine whether a slot can be reused.
171 It can be reused if objects of the type of the new slot will always
172 conflict with objects of the type of the old slot. */
173 tree type;
174 /* Nonzero if this temporary is currently in use. */
175 char in_use;
176 /* Nonzero if this temporary has its address taken. */
177 char addr_taken;
178 /* Nesting level at which this slot is being used. */
179 int level;
180 /* Nonzero if this should survive a call to free_temp_slots. */
181 char keep;
182 /* The offset of the slot from the frame_pointer, including extra space
183 for alignment. This info is for combine_temp_slots. */
184 HOST_WIDE_INT base_offset;
185 /* The size of the slot, including extra space for alignment. This
186 info is for combine_temp_slots. */
187 HOST_WIDE_INT full_size;
188 };
190 /* Forward declarations. */
192 static struct temp_slot *find_temp_slot_from_address (rtx);
193 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
194 static void pad_below (struct args_size *, enum machine_mode, tree);
195 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
196 static int all_blocks (tree, tree *);
197 static tree *get_block_vector (tree, int *);
198 extern tree debug_find_var_in_block_tree (tree, tree);
199 /* We always define `record_insns' even if it's not used so that we
200 can always export `prologue_epilogue_contains'. */
201 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
202 static int contains (const_rtx, VEC(int,heap) **);
204 static void emit_return_into_block (basic_block);
206 static void prepare_function_start (void);
207 static void do_clobber_return_reg (rtx, void *);
208 static void do_use_return_reg (rtx, void *);
209 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
211 /* Stack of nested functions. */
212 /* Keep track of the cfun stack. */
214 typedef struct function *function_p;
216 DEF_VEC_P(function_p);
217 DEF_VEC_ALLOC_P(function_p,heap);
218 static VEC(function_p,heap) *function_context_stack;
220 /* Save the current context for compilation of a nested function.
221 This is called from language-specific code. */
224 push_function_context (void)
227 allocate_struct_function (NULL, false);
229 VEC_safe_push (function_p, heap, function_context_stack, cfun);
233 /* Restore the last saved context, at the end of a nested function.
234 This function is called from language-specific code. */
237 pop_function_context (void)
239 struct function *p = VEC_pop (function_p, function_context_stack);
241 current_function_decl = p->decl;
243 /* Reset variables that have known state during rtx generation. */
244 virtuals_instantiated = 0;
245 generating_concat_p = 1;
248 /* Clear out all parts of the state in F that can safely be discarded
249 after the function has been parsed, but not compiled, to let
250 garbage collection reclaim the memory. */
253 free_after_parsing (struct function *f)
258 /* Clear out all parts of the state in F that can safely be discarded
259 after the function has been compiled, to let garbage collection
260 reclaim the memory. */
263 free_after_compilation (struct function *f)
265 VEC_free (int, heap, prologue);
266 VEC_free (int, heap, epilogue);
267 VEC_free (int, heap, sibcall_epilogue);
268 if (crtl->emit.regno_pointer_align)
269 free (crtl->emit.regno_pointer_align);
271 memset (crtl, 0, sizeof (struct rtl_data));
276 regno_reg_rtx = NULL;
277 insn_locators_free ();
280 /* Return size needed for stack frame based on slots so far allocated.
281 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
282 the caller may have to do that. */
285 get_frame_size (void)
287 if (FRAME_GROWS_DOWNWARD)
288 return -frame_offset;
293 /* Issue an error message and return TRUE if frame OFFSET overflows in
294 the signed target pointer arithmetic for function FUNC. Otherwise
295 return FALSE. */
298 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
300 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
302 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
303 /* Leave room for the fixed part of the frame. */
304 - 64 * UNITS_PER_WORD)
306 error ("%Jtotal size of local objects too large", func);
313 /* Return stack slot alignment in bits for TYPE and MODE. */
316 get_stack_local_alignment (tree type, enum machine_mode mode)
318 unsigned int alignment;
321 alignment = BIGGEST_ALIGNMENT;
323 alignment = GET_MODE_ALIGNMENT (mode);
325 /* Allow the front end to (possibly) increase the alignment of this
326 type. */
328 type = lang_hooks.types.type_for_mode (mode, 0);
330 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
333 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
334 with machine mode MODE.
336 ALIGN controls the amount of alignment for the address of the slot:
337 0 means according to MODE,
338 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
339 -2 means use BITS_PER_UNIT,
340 positive specifies alignment boundary in bits.
342 If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.
344 We do not round to stack_boundary here. */
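/* Hedged usage sketch (illustrative calls, not from the original sources)
   of the ALIGN conventions described above, via the assign_stack_local
   wrapper defined below:

       assign_stack_local (SImode, 4, 0);     -- align according to SImode
       assign_stack_local (BLKmode, 32, -1);  -- BIGGEST_ALIGNMENT, size rounded
       assign_stack_local (BLKmode, 7, -2);   -- byte alignment only
       assign_stack_local (DImode, 8, 64);    -- explicit 64-bit boundary  */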
347 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
348 int align,
349 bool reduce_alignment_ok ATTRIBUTE_UNUSED)
352 int bigend_correction = 0;
353 unsigned int alignment, alignment_in_bits;
354 int frame_off, frame_alignment, frame_phase;
358 alignment = get_stack_local_alignment (NULL, mode);
359 alignment /= BITS_PER_UNIT;
361 else if (align == -1)
363 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
364 size = CEIL_ROUND (size, alignment);
366 else if (align == -2)
367 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
369 alignment = align / BITS_PER_UNIT;
371 alignment_in_bits = alignment * BITS_PER_UNIT;
373 if (FRAME_GROWS_DOWNWARD)
374 frame_offset -= size;
376 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
377 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
379 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
380 alignment = alignment_in_bits / BITS_PER_UNIT;
383 if (SUPPORTS_STACK_ALIGNMENT)
385 if (crtl->stack_alignment_estimated < alignment_in_bits)
387 if (!crtl->stack_realign_processed)
388 crtl->stack_alignment_estimated = alignment_in_bits;
391 /* If stack is realigned and stack alignment value
392 hasn't been finalized, it is OK not to increase
393 stack_alignment_estimated. The bigger alignment
394 requirement is recorded in stack_alignment_needed
395 below. */
396 gcc_assert (!crtl->stack_realign_finalized);
397 if (!crtl->stack_realign_needed)
399 /* It is OK to reduce the alignment as long as the
400 requested size is 0 or the estimated stack
401 alignment >= mode alignment. */
402 gcc_assert (reduce_alignment_ok
403 || size == 0
404 || (crtl->stack_alignment_estimated
405 >= GET_MODE_ALIGNMENT (mode)));
406 alignment_in_bits = crtl->stack_alignment_estimated;
407 alignment = alignment_in_bits / BITS_PER_UNIT;
413 if (crtl->stack_alignment_needed < alignment_in_bits)
414 crtl->stack_alignment_needed = alignment_in_bits;
415 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
416 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
418 /* Calculate how many bytes the start of local variables is off from
419 stack alignment. */
420 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
421 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
422 frame_phase = frame_off ? frame_alignment - frame_off : 0;
424 /* Round the frame offset to the specified alignment. The default is
425 to always honor requests to align the stack but a port may choose to
426 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
427 if (STACK_ALIGNMENT_NEEDED)
431 /* We must be careful here, since FRAME_OFFSET might be negative and
432 division with a negative dividend isn't as well defined as we might
433 like. So we instead assume that ALIGNMENT is a power of two and
434 use logical operations which are unambiguous. */
435 if (FRAME_GROWS_DOWNWARD)
437 = (FLOOR_ROUND (frame_offset - frame_phase,
438 (unsigned HOST_WIDE_INT) alignment)
442 = (CEIL_ROUND (frame_offset - frame_phase,
443 (unsigned HOST_WIDE_INT) alignment)
447 /* On a big-endian machine, if we are allocating more space than we will use,
448 use the least significant bytes of those that are allocated. */
449 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
450 bigend_correction = size - GET_MODE_SIZE (mode);
452 /* If we have already instantiated virtual registers, return the actual
453 address relative to the frame pointer. */
454 if (virtuals_instantiated)
455 addr = plus_constant (frame_pointer_rtx,
456 trunc_int_for_mode
457 (frame_offset + bigend_correction
458 + STARTING_FRAME_OFFSET, Pmode));
460 addr = plus_constant (virtual_stack_vars_rtx,
461 trunc_int_for_mode
462 (frame_offset + bigend_correction,
463 Pmode));
465 if (!FRAME_GROWS_DOWNWARD)
466 frame_offset += size;
468 x = gen_rtx_MEM (mode, addr);
469 set_mem_align (x, alignment_in_bits);
470 MEM_NOTRAP_P (x) = 1;
472 stack_slot_list
473 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
475 if (frame_offset_overflow (frame_offset, current_function_decl))
481 /* Wrap up assign_stack_local_1 with last parameter as false. */
484 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
486 return assign_stack_local_1 (mode, size, align, false);
489 /* Removes temporary slot TEMP from LIST. */
492 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
495 temp->next->prev = temp->prev;
497 temp->prev->next = temp->next;
501 temp->prev = temp->next = NULL;
504 /* Inserts temporary slot TEMP to LIST. */
507 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
511 (*list)->prev = temp;
516 /* Returns the list of used temp slots at LEVEL. */
518 static struct temp_slot **
519 temp_slots_at_level (int level)
521 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
522 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
524 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
527 /* Returns the maximal temporary slot level. */
530 max_slot_level (void)
532 if (!used_temp_slots)
535 return VEC_length (temp_slot_p, used_temp_slots) - 1;
538 /* Moves temporary slot TEMP to LEVEL. */
541 move_slot_to_level (struct temp_slot *temp, int level)
543 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
544 insert_slot_to_list (temp, temp_slots_at_level (level));
548 /* Make temporary slot TEMP available. */
551 make_slot_available (struct temp_slot *temp)
553 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
554 insert_slot_to_list (temp, &avail_temp_slots);
559 /* Allocate a temporary stack slot and record it for possible later
560 reuse.
562 MODE is the machine mode to be given to the returned rtx.
564 SIZE is the size in units of the space required. We do no rounding here
565 since assign_stack_local will do any required rounding.
567 KEEP is 1 if this slot is to be retained after a call to
568 free_temp_slots. Automatic variables for a block are allocated
569 with this flag. KEEP values of 2 or 3 were needed respectively
570 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
571 or for SAVE_EXPRs, but they are now unused.
573 TYPE is the type that will be used for the stack slot. */
576 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
577 int keep, tree type)
580 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
583 /* If SIZE is -1 it means that somebody tried to allocate a temporary
584 of a variable size. */
585 gcc_assert (size != -1);
587 /* These are now unused. */
588 gcc_assert (keep <= 1);
590 align = get_stack_local_alignment (type, mode);
592 /* Try to find an available, already-allocated temporary of the proper
593 mode which meets the size and alignment requirements. Choose the
594 smallest one with the closest alignment.
596 If assign_stack_temp is called outside of the tree->rtl expansion,
597 we cannot reuse the stack slots (that may still refer to
598 VIRTUAL_STACK_VARS_REGNUM). */
599 if (!virtuals_instantiated
)
601 for (p
= avail_temp_slots
; p
; p
= p
->next
)
603 if (p
->align
>= align
&& p
->size
>= size
604 && GET_MODE (p
->slot
) == mode
605 && objects_must_conflict_p (p
->type
, type
)
606 && (best_p
== 0 || best_p
->size
> p
->size
607 || (best_p
->size
== p
->size
&& best_p
->align
> p
->align
)))
609 if (p
->align
== align
&& p
->size
== size
)
612 cut_slot_from_list (selected
, &avail_temp_slots
);
621 /* Make our best, if any, the one to use. */
625 cut_slot_from_list (selected
, &avail_temp_slots
);
627 /* If there are enough aligned bytes left over, make them into a new
628 temp_slot so that the extra bytes don't get wasted. Do this only
629 for BLKmode slots, so that we can be sure of the alignment. */
630 if (GET_MODE (best_p
->slot
) == BLKmode
)
632 int alignment
= best_p
->align
/ BITS_PER_UNIT
;
633 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
635 if (best_p
->size
- rounded_size
>= alignment
)
637 p
= GGC_NEW (struct temp_slot
);
638 p
->in_use
= p
->addr_taken
= 0;
639 p
->size
= best_p
->size
- rounded_size
;
640 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
641 p
->full_size
= best_p
->full_size
- rounded_size
;
642 p
->slot
= adjust_address_nv (best_p
->slot
, BLKmode
, rounded_size
);
643 p
->align
= best_p
->align
;
645 p
->type
= best_p
->type
;
646 insert_slot_to_list (p
, &avail_temp_slots
);
648 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
651 best_p
->size
= rounded_size
;
652 best_p
->full_size
= rounded_size
;
657 /* If we still didn't find one, make a new temporary. */
660 HOST_WIDE_INT frame_offset_old
= frame_offset
;
662 p
= GGC_NEW (struct temp_slot
);
664 /* We are passing an explicit alignment request to assign_stack_local.
665 One side effect of that is assign_stack_local will not round SIZE
666 to ensure the frame offset remains suitably aligned.
668 So for requests which depended on the rounding of SIZE, we go ahead
669 and round it now. We also make sure ALIGNMENT is at least
670 BIGGEST_ALIGNMENT. */
671 gcc_assert (mode
!= BLKmode
|| align
== BIGGEST_ALIGNMENT
);
672 p
->slot
= assign_stack_local (mode
,
674 ? CEIL_ROUND (size
, (int) align
/ BITS_PER_UNIT
)
680 /* The following slot size computation is necessary because we don't
681 know the actual size of the temporary slot until assign_stack_local
682 has performed all the frame alignment and size rounding for the
683 requested temporary. Note that extra space added for alignment
684 can be either above or below this stack slot depending on which
685 way the frame grows. We include the extra space if and only if it
686 is above this slot. */
687 if (FRAME_GROWS_DOWNWARD
)
688 p
->size
= frame_offset_old
- frame_offset
;
692 /* Now define the fields used by combine_temp_slots. */
693 if (FRAME_GROWS_DOWNWARD
)
695 p
->base_offset
= frame_offset
;
696 p
->full_size
= frame_offset_old
- frame_offset
;
700 p
->base_offset
= frame_offset_old
;
701 p
->full_size
= frame_offset
- frame_offset_old
;
712 p
->level
= temp_slot_level
;
715 pp
= temp_slots_at_level (p
->level
);
716 insert_slot_to_list (p
, pp
);
718 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
719 slot
= gen_rtx_MEM (mode
, XEXP (p
->slot
, 0));
720 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, slot
, stack_slot_list
);
722 /* If we know the alias set for the memory that will be used, use
723 it. If there's no TYPE, then we don't know anything about the
724 alias set for the memory. */
725 set_mem_alias_set (slot
, type
? get_alias_set (type
) : 0);
726 set_mem_align (slot
, align
);
728 /* If a type is specified, set the relevant flags. */
731 MEM_VOLATILE_P (slot
) = TYPE_VOLATILE (type
);
732 MEM_SET_IN_STRUCT_P (slot
, (AGGREGATE_TYPE_P (type
)
733 || TREE_CODE (type
) == COMPLEX_TYPE
));
735 MEM_NOTRAP_P (slot
) = 1;
740 /* Allocate a temporary stack slot and record it for possible later
741 reuse. First three arguments are same as in preceding function. */
744 assign_stack_temp (enum machine_mode mode
, HOST_WIDE_INT size
, int keep
)
746 return assign_stack_temp_for_type (mode
, size
, keep
, NULL_TREE
);
749 /* Assign a temporary.
750 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
751 and so its name should be used in error messages. In either case, we
752 allocate memory of the given type.
753 KEEP is as for assign_stack_temp.
754 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
755 it is 0 if a register is OK.
756 DONT_PROMOTE is 1 if we should not promote values in register
757 variables. */
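/* Illustrative call (not from the original sources): an expander that needs
   an addressable temporary for a value of type TYPE might use

       rtx tmp = assign_temp (type, 0, 1, 0);

   i.e. KEEP 0, MEMORY_REQUIRED 1; when MEMORY_REQUIRED is 0 and the mode is
   not BLKmode, the function below simply returns a (possibly promoted)
   pseudo register instead of a stack slot. */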
760 assign_temp (tree type_or_decl
, int keep
, int memory_required
,
761 int dont_promote ATTRIBUTE_UNUSED
)
764 enum machine_mode mode
;
769 if (DECL_P (type_or_decl
))
770 decl
= type_or_decl
, type
= TREE_TYPE (decl
);
772 decl
= NULL
, type
= type_or_decl
;
774 mode
= TYPE_MODE (type
);
776 unsignedp
= TYPE_UNSIGNED (type
);
779 if (mode
== BLKmode
|| memory_required
)
781 HOST_WIDE_INT size
= int_size_in_bytes (type
);
784 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
785 problems with allocating the stack space. */
789 /* Unfortunately, we don't yet know how to allocate variable-sized
790 temporaries. However, sometimes we can find a fixed upper limit on
791 the size, so try that instead. */
793 size
= max_int_size_in_bytes (type
);
795 /* The size of the temporary may be too large to fit into an integer. */
796 /* ??? Not sure this should happen except for user silliness, so limit
797 this to things that aren't compiler-generated temporaries. The
798 rest of the time we'll die in assign_stack_temp_for_type. */
799 if (decl
&& size
== -1
800 && TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
)
802 error ("size of variable %q+D is too large", decl
);
806 tmp
= assign_stack_temp_for_type (mode
, size
, keep
, type
);
812 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
815 return gen_reg_rtx (mode
);
818 /* Combine temporary stack slots which are adjacent on the stack.
820 This allows for better use of already allocated stack space. This is only
821 done for BLKmode slots because we can be sure that we won't have alignment
822 problems in this case. */
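/* Worked example of the combination rule (illustrative numbers): if an
   available BLKmode slot P has base_offset 16 and full_size 8, and another
   available BLKmode slot Q has base_offset 24, then 16 + 8 == 24, so Q is
   adjacent to P; Q is removed from the list and P's full_size grows to
   cover both slots. */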
825 combine_temp_slots (void)
827 struct temp_slot
*p
, *q
, *next
, *next_q
;
830 /* We can't combine slots, because the information about which slot
831 is in which alias set will be lost. */
832 if (flag_strict_aliasing
)
835 /* If there are a lot of temp slots, don't do anything unless
836 we are optimizing at high levels. */
837 if (! flag_expensive_optimizations)
838 for (p
= avail_temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
839 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
842 for (p
= avail_temp_slots
; p
; p
= next
)
848 if (GET_MODE (p
->slot
) != BLKmode
)
851 for (q
= p
->next
; q
; q
= next_q
)
857 if (GET_MODE (q
->slot
) != BLKmode
)
860 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
862 /* Q comes after P; combine Q into P. */
864 p
->full_size
+= q
->full_size
;
867 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
869 /* P comes after Q; combine P into Q. */
871 q
->full_size
+= p
->full_size
;
876 cut_slot_from_list (q
, &avail_temp_slots
);
879 /* Either delete P or advance past it. */
881 cut_slot_from_list (p
, &avail_temp_slots
);
885 /* Find the temp slot corresponding to the object at address X. */
887 static struct temp_slot
*
888 find_temp_slot_from_address (rtx x
)
894 for (i
= max_slot_level (); i
>= 0; i
--)
895 for (p
= *temp_slots_at_level (i
); p
; p
= p
->next
)
897 if (XEXP (p
->slot
, 0) == x
899 || (GET_CODE (x
) == PLUS
900 && XEXP (x
, 0) == virtual_stack_vars_rtx
901 && GET_CODE (XEXP (x
, 1)) == CONST_INT
902 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
903 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
906 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
907 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
908 if (XEXP (next
, 0) == x
)
912 /* If we have a sum involving a register, see if it points to a temp
913 slot. */
914 if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 0))
915 && (p
= find_temp_slot_from_address (XEXP (x
, 0))) != 0)
917 else if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 1))
918 && (p
= find_temp_slot_from_address (XEXP (x
, 1))) != 0)
924 /* Indicate that NEW_RTX is an alternate way of referring to the temp
925 slot that previously was known by OLD_RTX. */
928 update_temp_slot_address (rtx old_rtx
, rtx new_rtx
)
932 if (rtx_equal_p (old_rtx
, new_rtx
))
935 p
= find_temp_slot_from_address (old_rtx
);
937 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
938 NEW_RTX is a register, see if one operand of the PLUS is a
939 temporary location. If so, NEW_RTX points into it. Otherwise,
940 if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
941 in common between them; if so, try a recursive call on those
942 values. */
945 if (GET_CODE (old_rtx
) != PLUS
)
950 update_temp_slot_address (XEXP (old_rtx
, 0), new_rtx
);
951 update_temp_slot_address (XEXP (old_rtx
, 1), new_rtx
);
954 else if (GET_CODE (new_rtx
) != PLUS
)
957 if (rtx_equal_p (XEXP (old_rtx
, 0), XEXP (new_rtx
, 0)))
958 update_temp_slot_address (XEXP (old_rtx
, 1), XEXP (new_rtx
, 1));
959 else if (rtx_equal_p (XEXP (old_rtx
, 1), XEXP (new_rtx
, 0)))
960 update_temp_slot_address (XEXP (old_rtx
, 0), XEXP (new_rtx
, 1));
961 else if (rtx_equal_p (XEXP (old_rtx
, 0), XEXP (new_rtx
, 1)))
962 update_temp_slot_address (XEXP (old_rtx
, 1), XEXP (new_rtx
, 0));
963 else if (rtx_equal_p (XEXP (old_rtx
, 1), XEXP (new_rtx
, 1)))
964 update_temp_slot_address (XEXP (old_rtx
, 0), XEXP (new_rtx
, 0));
969 /* Otherwise add an alias for the temp's address. */
970 else if (p
->address
== 0)
971 p
->address
= new_rtx
;
974 if (GET_CODE (p
->address
) != EXPR_LIST
)
975 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
977 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new_rtx
, p
->address
);
981 /* If X could be a reference to a temporary slot, mark the fact that its
982 address was taken. */
985 mark_temp_addr_taken (rtx x
)
992 /* If X is not in memory or is at a constant address, it cannot be in
994 if (!MEM_P (x
) || CONSTANT_P (XEXP (x
, 0)))
997 p
= find_temp_slot_from_address (XEXP (x
, 0));
1002 /* If X could be a reference to a temporary slot, mark that slot as
1003 belonging to the level one higher than the current level. If X
1004 matched one of our slots, just mark that one. Otherwise, we can't
1005 easily predict which it is, so upgrade all of them. Kept slots
1006 need not be touched.
1008 This is called when an ({...}) construct occurs and a statement
1009 returns a value in memory. */
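/* For instance, in a GNU C statement expression such as

       x = ({ struct big tmp = foo (); bar (&tmp); tmp; });

   the value of the final statement may live in a stack temporary, which must
   then survive past the end of the enclosing statement.  (The types and
   functions in this example are purely illustrative.) */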
1012 preserve_temp_slots (rtx x
)
1014 struct temp_slot
*p
= 0, *next
;
1016 /* If there is no result, we still might have some objects whose
1017 addresses were taken, so we need to make sure they stay around. */
1020 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1025 move_slot_to_level (p
, temp_slot_level
- 1);
1031 /* If X is a register that is being used as a pointer, see if we have
1032 a temporary slot we know it points to. To be consistent with
1033 the code below, we really should preserve all non-kept slots
1034 if we can't find a match, but that seems to be much too costly. */
1035 if (REG_P (x
) && REG_POINTER (x
))
1036 p
= find_temp_slot_from_address (x
);
1038 /* If X is not in memory or is at a constant address, it cannot be in
1039 a temporary slot, but it can contain something whose address was
1040 taken. */
1041 if (p
== 0 && (!MEM_P (x
) || CONSTANT_P (XEXP (x
, 0))))
1043 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1048 move_slot_to_level (p
, temp_slot_level
- 1);
1054 /* First see if we can find a match. */
1056 p
= find_temp_slot_from_address (XEXP (x
, 0));
1060 /* Move everything at our level whose address was taken to our new
1061 level in case we used its address. */
1062 struct temp_slot
*q
;
1064 if (p
->level
== temp_slot_level
)
1066 for (q
= *temp_slots_at_level (temp_slot_level
); q
; q
= next
)
1070 if (p
!= q
&& q
->addr_taken
)
1071 move_slot_to_level (q
, temp_slot_level
- 1);
1074 move_slot_to_level (p
, temp_slot_level
- 1);
1080 /* Otherwise, preserve all non-kept slots at this level. */
1081 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1086 move_slot_to_level (p
, temp_slot_level
- 1);
1090 /* Free all temporaries used so far. This is normally called at the
1091 end of generating code for a statement. */
1094 free_temp_slots (void)
1096 struct temp_slot *p, *next;
1098 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1103 make_slot_available (p);
1106 combine_temp_slots ();
1109 /* Push deeper into the nesting level for stack temporaries. */
1112 push_temp_slots (void)
1117 /* Pop a temporary nesting level. All slots in use in the current level
1118 are freed. */
1121 pop_temp_slots (void)
1123 struct temp_slot
*p
, *next
;
1125 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1128 make_slot_available (p
);
1131 combine_temp_slots ();
1136 /* Initialize temporary slots. */
1139 init_temp_slots (void)
1141 /* We have not allocated any temporaries yet. */
1142 avail_temp_slots = 0;
1143 used_temp_slots = 0;
1144 temp_slot_level = 0;
1147 /* These routines are responsible for converting virtual register references
1148 to the actual hard register references once RTL generation is complete.
1150 The following four variables are used for communication between the
1151 routines. They contain the offsets of the virtual registers from their
1152 respective hard registers. */
1154 static int in_arg_offset;
1155 static int var_offset;
1156 static int dynamic_offset;
1157 static int out_arg_offset;
1158 static int cfa_offset;
1160 /* In most machines, the stack pointer register is equivalent to the bottom
1161 of the stack. */
1163 #ifndef STACK_POINTER_OFFSET
1164 #define STACK_POINTER_OFFSET 0
1167 /* If not defined, pick an appropriate default for the offset of dynamically
1168 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1169 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1171 #ifndef STACK_DYNAMIC_OFFSET
1173 /* The bottom of the stack points to the actual arguments. If
1174 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1175 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1176 stack space for register parameters is not pushed by the caller, but
1177 rather part of the fixed stack areas and hence not included in
1178 `crtl->outgoing_args_size'. Nevertheless, we must allow
1179 for it when allocating stack dynamic objects. */
1181 #if defined(REG_PARM_STACK_SPACE)
1182 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1183 ((ACCUMULATE_OUTGOING_ARGS \
1184 ? (crtl->outgoing_args_size \
1185 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1186 : REG_PARM_STACK_SPACE (FNDECL))) \
1187 : 0) + (STACK_POINTER_OFFSET))
1189 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1190 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1191 + (STACK_POINTER_OFFSET))
1196 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1197 is a virtual register, return the equivalent hard register and set the
1198 offset indirectly through the pointer. Otherwise, return 0. */
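/* In outline, the mapping implemented below is:

       virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
                                     (or the DRAP internal arg pointer)
       virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
       virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
       virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
       virtual_cfa_rtx            -> frame or arg pointer + cfa_offset

   where the offsets are the file-scope variables computed in
   instantiate_virtual_regs. */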
1201 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1204 HOST_WIDE_INT offset;
1206 if (x
== virtual_incoming_args_rtx
)
1208 if (stack_realign_drap
)
1210 /* Replace virtual_incoming_args_rtx with internal arg
1211 pointer if DRAP is used to realign stack. */
1212 new_rtx
= crtl
->args
.internal_arg_pointer
;
1216 new_rtx
= arg_pointer_rtx
, offset
= in_arg_offset
;
1218 else if (x
== virtual_stack_vars_rtx
)
1219 new_rtx
= frame_pointer_rtx
, offset
= var_offset
;
1220 else if (x
== virtual_stack_dynamic_rtx
)
1221 new_rtx
= stack_pointer_rtx
, offset
= dynamic_offset
;
1222 else if (x
== virtual_outgoing_args_rtx
)
1223 new_rtx
= stack_pointer_rtx
, offset
= out_arg_offset
;
1224 else if (x
== virtual_cfa_rtx
)
1226 #ifdef FRAME_POINTER_CFA_OFFSET
1227 new_rtx
= frame_pointer_rtx
;
1229 new_rtx
= arg_pointer_rtx
;
1231 offset
= cfa_offset
;
1240 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1241 Instantiate any virtual registers present inside of *LOC. The expression
1242 is simplified, as much as possible, but is not to be considered "valid"
1243 in any sense implied by the target. If any change is made, set CHANGED
1244 to true. */
1247 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1249 HOST_WIDE_INT offset
;
1250 bool *changed
= (bool *) data
;
1257 switch (GET_CODE (x
))
1260 new_rtx
= instantiate_new_reg (x
, &offset
);
1263 *loc
= plus_constant (new_rtx
, offset
);
1270 new_rtx
= instantiate_new_reg (XEXP (x
, 0), &offset
);
1273 new_rtx
= plus_constant (new_rtx
, offset
);
1274 *loc
= simplify_gen_binary (PLUS
, GET_MODE (x
), new_rtx
, XEXP (x
, 1));
1280 /* FIXME -- from old code */
1281 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1282 we can commute the PLUS and SUBREG because pointers into the
1283 frame are well-behaved. */
1293 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1294 matches the predicate for insn CODE operand OPERAND. */
1297 safe_insn_predicate (int code
, int operand
, rtx x
)
1299 const struct insn_operand_data
*op_data
;
1304 op_data
= &insn_data
[code
].operand
[operand
];
1305 if (op_data
->predicate
== NULL
)
1308 return op_data
->predicate (x
, op_data
->mode
);
1311 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1312 registers present inside of insn. The result will be a valid insn. */
1315 instantiate_virtual_regs_in_insn (rtx insn
)
1317 HOST_WIDE_INT offset
;
1319 bool any_change
= false;
1320 rtx set
, new_rtx
, x
, seq
;
1322 /* There are some special cases to be handled first. */
1323 set
= single_set (insn
);
1326 /* We're allowed to assign to a virtual register. This is interpreted
1327 to mean that the underlying register gets assigned the inverse
1328 transformation. This is used, for example, in the handling of
1330 new_rtx
= instantiate_new_reg (SET_DEST (set
), &offset
);
1335 for_each_rtx (&SET_SRC (set
), instantiate_virtual_regs_in_rtx
, NULL
);
1336 x
= simplify_gen_binary (PLUS
, GET_MODE (new_rtx
), SET_SRC (set
),
1338 x
= force_operand (x
, new_rtx
);
1340 emit_move_insn (new_rtx
, x
);
1345 emit_insn_before (seq
, insn
);
1350 /* Handle a straight copy from a virtual register by generating a
1351 new add insn. The difference between this and falling through
1352 to the generic case is avoiding a new pseudo and eliminating a
1353 move insn in the initial rtl stream. */
1354 new_rtx
= instantiate_new_reg (SET_SRC (set
), &offset
);
1355 if (new_rtx
&& offset
!= 0
1356 && REG_P (SET_DEST (set
))
1357 && REGNO (SET_DEST (set
)) > LAST_VIRTUAL_REGISTER
)
1361 x
= expand_simple_binop (GET_MODE (SET_DEST (set
)), PLUS
,
1362 new_rtx
, GEN_INT (offset
), SET_DEST (set
),
1363 1, OPTAB_LIB_WIDEN
);
1364 if (x
!= SET_DEST (set
))
1365 emit_move_insn (SET_DEST (set
), x
);
1370 emit_insn_before (seq
, insn
);
1375 extract_insn (insn
);
1376 insn_code
= INSN_CODE (insn
);
1378 /* Handle a plus involving a virtual register by determining if the
1379 operands remain valid if they're modified in place. */
1380 if (GET_CODE (SET_SRC (set
)) == PLUS
1381 && recog_data
.n_operands
>= 3
1382 && recog_data
.operand_loc
[1] == &XEXP (SET_SRC (set
), 0)
1383 && recog_data
.operand_loc
[2] == &XEXP (SET_SRC (set
), 1)
1384 && GET_CODE (recog_data
.operand
[2]) == CONST_INT
1385 && (new_rtx
= instantiate_new_reg (recog_data
.operand
[1], &offset
)))
1387 offset
+= INTVAL (recog_data
.operand
[2]);
1389 /* If the sum is zero, then replace with a plain move. */
1391 && REG_P (SET_DEST (set
))
1392 && REGNO (SET_DEST (set
)) > LAST_VIRTUAL_REGISTER
)
1395 emit_move_insn (SET_DEST (set
), new_rtx
);
1399 emit_insn_before (seq
, insn
);
1404 x
= gen_int_mode (offset
, recog_data
.operand_mode
[2]);
1406 /* Using validate_change and apply_change_group here leaves
1407 recog_data in an invalid state. Since we know exactly what
1408 we want to check, do those two by hand. */
1409 if (safe_insn_predicate (insn_code
, 1, new_rtx
)
1410 && safe_insn_predicate (insn_code
, 2, x
))
1412 *recog_data
.operand_loc
[1] = recog_data
.operand
[1] = new_rtx
;
1413 *recog_data
.operand_loc
[2] = recog_data
.operand
[2] = x
;
1416 /* Fall through into the regular operand fixup loop in
1417 order to take care of operands other than 1 and 2. */
1423 extract_insn (insn
);
1424 insn_code
= INSN_CODE (insn
);
1427 /* In the general case, we expect virtual registers to appear only in
1428 operands, and then only as either bare registers or inside memories. */
1429 for (i
= 0; i
< recog_data
.n_operands
; ++i
)
1431 x
= recog_data
.operand
[i
];
1432 switch (GET_CODE (x
))
1436 rtx addr
= XEXP (x
, 0);
1437 bool changed
= false;
1439 for_each_rtx (&addr
, instantiate_virtual_regs_in_rtx
, &changed
);
1444 x
= replace_equiv_address (x
, addr
);
1445 /* It may happen that the address with the virtual reg
1446 was valid (e.g. based on the virtual stack reg, which might
1447 be acceptable to the predicates with all offsets), whereas
1448 the address now isn't anymore, for instance when the address
1449 is still offsetted, but the base reg isn't virtual-stack-reg
1450 anymore. Below we would do a force_reg on the whole operand,
1451 but this insn might actually only accept memory. Hence,
1452 before doing that last resort, try to reload the address into
1453 a register, so this operand stays a MEM. */
1454 if (!safe_insn_predicate (insn_code
, i
, x
))
1456 addr
= force_reg (GET_MODE (addr
), addr
);
1457 x
= replace_equiv_address (x
, addr
);
1462 emit_insn_before (seq
, insn
);
1467 new_rtx
= instantiate_new_reg (x
, &offset
);
1468 if (new_rtx
== NULL
)
1476 /* Careful, special mode predicates may have stuff in
1477 insn_data[insn_code].operand[i].mode that isn't useful
1478 to us for computing a new value. */
1479 /* ??? Recognize address_operand and/or "p" constraints
1480 to see if (plus new offset) is a valid before we put
1481 this through expand_simple_binop. */
1482 x
= expand_simple_binop (GET_MODE (x
), PLUS
, new_rtx
,
1483 GEN_INT (offset
), NULL_RTX
,
1484 1, OPTAB_LIB_WIDEN
);
1487 emit_insn_before (seq
, insn
);
1492 new_rtx
= instantiate_new_reg (SUBREG_REG (x
), &offset
);
1493 if (new_rtx
== NULL
)
1498 new_rtx
= expand_simple_binop (GET_MODE (new_rtx
), PLUS
, new_rtx
,
1499 GEN_INT (offset
), NULL_RTX
,
1500 1, OPTAB_LIB_WIDEN
);
1503 emit_insn_before (seq
, insn
);
1505 x
= simplify_gen_subreg (recog_data
.operand_mode
[i
], new_rtx
,
1506 GET_MODE (new_rtx
), SUBREG_BYTE (x
));
1514 /* At this point, X contains the new value for the operand.
1515 Validate the new value vs the insn predicate. Note that
1516 asm insns will have insn_code -1 here. */
1517 if (!safe_insn_predicate (insn_code
, i
, x
))
1520 x
= force_reg (insn_data
[insn_code
].operand
[i
].mode
, x
);
1524 emit_insn_before (seq
, insn
);
1527 *recog_data
.operand_loc
[i
] = recog_data
.operand
[i
] = x
;
1533 /* Propagate operand changes into the duplicates. */
1534 for (i
= 0; i
< recog_data
.n_dups
; ++i
)
1535 *recog_data
.dup_loc
[i
]
1536 = copy_rtx (recog_data
.operand
[(unsigned)recog_data
.dup_num
[i
]]);
1538 /* Force re-recognition of the instruction for validation. */
1539 INSN_CODE (insn
) = -1;
1542 if (asm_noperands (PATTERN (insn
)) >= 0)
1544 if (!check_asm_operands (PATTERN (insn
)))
1546 error_for_asm (insn
, "impossible constraint in %<asm%>");
1552 if (recog_memoized (insn
) < 0)
1553 fatal_insn_not_found (insn
);
1557 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1558 do any instantiation required. */
1561 instantiate_decl_rtl (rtx x
)
1568 /* If this is a CONCAT, recurse for the pieces. */
1569 if (GET_CODE (x
) == CONCAT
)
1571 instantiate_decl_rtl (XEXP (x
, 0));
1572 instantiate_decl_rtl (XEXP (x
, 1));
1576 /* If this is not a MEM, no need to do anything. Similarly if the
1577 address is a constant or a register that is not a virtual register. */
1582 if (CONSTANT_P (addr
)
1584 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
1585 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
1588 for_each_rtx (&XEXP (x
, 0), instantiate_virtual_regs_in_rtx
, NULL
);
1591 /* Helper for instantiate_decls called via walk_tree: Process all decls
1592 in the given DECL_VALUE_EXPR. */
1595 instantiate_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1601 if (DECL_P (t
) && DECL_RTL_SET_P (t
))
1602 instantiate_decl_rtl (DECL_RTL (t
));
1607 /* Subroutine of instantiate_decls: Process all decls in the given
1608 BLOCK node and all its subblocks. */
1611 instantiate_decls_1 (tree let
)
1615 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
1617 if (DECL_RTL_SET_P (t
))
1618 instantiate_decl_rtl (DECL_RTL (t
));
1619 if (TREE_CODE (t
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (t
))
1621 tree v
= DECL_VALUE_EXPR (t
);
1622 walk_tree (&v
, instantiate_expr
, NULL
, NULL
);
1626 /* Process all subblocks. */
1627 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= BLOCK_CHAIN (t
))
1628 instantiate_decls_1 (t
);
1631 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1632 all virtual registers in their DECL_RTL's. */
1635 instantiate_decls (tree fndecl
)
1639 /* Process all parameters of the function. */
1640 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
1642 instantiate_decl_rtl (DECL_RTL (decl
));
1643 instantiate_decl_rtl (DECL_INCOMING_RTL (decl
));
1644 if (DECL_HAS_VALUE_EXPR_P (decl
))
1646 tree v
= DECL_VALUE_EXPR (decl
);
1647 walk_tree (&v
, instantiate_expr
, NULL
, NULL
);
1651 /* Now process all variables defined in the function or its subblocks. */
1652 instantiate_decls_1 (DECL_INITIAL (fndecl
));
1654 t
= cfun
->local_decls
;
1655 cfun
->local_decls
= NULL_TREE
;
1658 next
= TREE_CHAIN (t
);
1659 decl
= TREE_VALUE (t
);
1660 if (DECL_RTL_SET_P (decl
))
1661 instantiate_decl_rtl (DECL_RTL (decl
));
1666 /* Pass through the INSNS of function FNDECL and convert virtual register
1667 references to hard register references. */
1670 instantiate_virtual_regs (void)
1674 /* Compute the offsets to use for this function. */
1675 in_arg_offset
= FIRST_PARM_OFFSET (current_function_decl
);
1676 var_offset
= STARTING_FRAME_OFFSET
;
1677 dynamic_offset
= STACK_DYNAMIC_OFFSET (current_function_decl
);
1678 out_arg_offset
= STACK_POINTER_OFFSET
;
1679 #ifdef FRAME_POINTER_CFA_OFFSET
1680 cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
1682 cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
1685 /* Initialize recognition, indicating that volatile is OK. */
1688 /* Scan through all the insns, instantiating every virtual register still
1690 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1693 /* These patterns in the instruction stream can never be recognized.
1694 Fortunately, they shouldn't contain virtual registers either. */
1695 if (GET_CODE (PATTERN (insn
)) == USE
1696 || GET_CODE (PATTERN (insn
)) == CLOBBER
1697 || GET_CODE (PATTERN (insn
)) == ADDR_VEC
1698 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
1699 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
1702 instantiate_virtual_regs_in_insn (insn
);
1704 if (INSN_DELETED_P (insn
))
1707 for_each_rtx (®_NOTES (insn
), instantiate_virtual_regs_in_rtx
, NULL
);
1709 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1710 if (GET_CODE (insn
) == CALL_INSN
)
1711 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn
),
1712 instantiate_virtual_regs_in_rtx
, NULL
);
1715 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1716 instantiate_decls (current_function_decl
);
1718 targetm
.instantiate_decls ();
1720 /* Indicate that, from now on, assign_stack_local should use
1721 frame_pointer_rtx. */
1722 virtuals_instantiated
= 1;
1726 struct rtl_opt_pass pass_instantiate_virtual_regs
=
1732 instantiate_virtual_regs
, /* execute */
1735 0, /* static_pass_number */
1737 0, /* properties_required */
1738 0, /* properties_provided */
1739 0, /* properties_destroyed */
1740 0, /* todo_flags_start */
1741 TODO_dump_func
/* todo_flags_finish */
1746 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1747 This means a type for which function calls must pass an address to the
1748 function or get an address back from the function.
1749 EXP may be a type node or an expression (whose type is tested). */
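/* For example (illustrative and target dependent): a type that is
   TREE_ADDRESSABLE, or a structure the target's return_in_memory hook
   says must be returned in memory, is an aggregate value here, whereas a
   small struct that fits in call-clobbered return registers normally is
   not. */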
1752 aggregate_value_p (const_tree exp
, const_tree fntype
)
1754 int i
, regno
, nregs
;
1757 const_tree type
= (TYPE_P (exp
)) ? exp
: TREE_TYPE (exp
);
1759 /* DECL node associated with FNTYPE when relevant, which we might need to
1760 check for by-invisible-reference returns, typically for CALL_EXPR input
1761 EXPressions. */
1762 const_tree fndecl = NULL_TREE;
1765 switch (TREE_CODE (fntype
))
1768 fndecl
= get_callee_fndecl (fntype
);
1769 fntype
= fndecl
? TREE_TYPE (fndecl
) : 0;
1773 fntype
= TREE_TYPE (fndecl
);
1778 case IDENTIFIER_NODE
:
1782 /* We don't expect other rtl types here. */
1786 if (TREE_CODE (type
) == VOID_TYPE
)
1789 /* If the front end has decided that this needs to be passed by
1790 reference, do so. */
1791 if ((TREE_CODE (exp
) == PARM_DECL
|| TREE_CODE (exp
) == RESULT_DECL
)
1792 && DECL_BY_REFERENCE (exp
))
1795 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1796 called function RESULT_DECL, meaning the function returns in memory by
1797 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1798 on the function type, which used to be the way to request such a return
1799 mechanism but might now be causing troubles at gimplification time if
1800 temporaries with the function type need to be created. */
1801 if (TREE_CODE (exp
) == CALL_EXPR
&& fndecl
&& DECL_RESULT (fndecl
)
1802 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
1805 if (targetm
.calls
.return_in_memory (type
, fntype
))
1807 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1808 and thus can't be returned in registers. */
1809 if (TREE_ADDRESSABLE (type
))
1811 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
1813 /* Make sure we have suitable call-clobbered regs to return
1814 the value in; if not, we must return it in memory. */
1815 reg
= hard_function_value (type
, 0, fntype
, 0);
1817 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1818 it is OK. */
1822 regno
= REGNO (reg
);
1823 nregs
= hard_regno_nregs
[regno
][TYPE_MODE (type
)];
1824 for (i
= 0; i
< nregs
; i
++)
1825 if (! call_used_regs
[regno
+ i
])
1830 /* Return true if we should assign DECL a pseudo register; false if it
1831 should live on the local stack. */
1834 use_register_for_decl (const_tree decl
)
1836 if (!targetm
.calls
.allocate_stack_slots_for_args())
1839 /* Honor volatile. */
1840 if (TREE_SIDE_EFFECTS (decl
))
1843 /* Honor addressability. */
1844 if (TREE_ADDRESSABLE (decl
))
1847 /* Only register-like things go in registers. */
1848 if (DECL_MODE (decl
) == BLKmode
)
1851 /* If -ffloat-store specified, don't put explicit float variables
1853 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1854 propagates values across these stores, and it probably shouldn't. */
1855 if (flag_float_store
&& FLOAT_TYPE_P (TREE_TYPE (decl
)))
1858 /* If we're not interested in tracking debugging information for
1859 this decl, then we can certainly put it in a register. */
1860 if (DECL_IGNORED_P (decl
))
1863 return (optimize
|| DECL_REGISTER (decl
));
1866 /* Return true if TYPE should be passed by invisible reference. */
1869 pass_by_reference (CUMULATIVE_ARGS
*ca
, enum machine_mode mode
,
1870 tree type
, bool named_arg
)
1874 /* If this type contains non-trivial constructors, then it is
1875 forbidden for the middle-end to create any new copies. */
1876 if (TREE_ADDRESSABLE (type
))
1879 /* GCC post 3.4 passes *all* variable sized types by reference. */
1880 if (!TYPE_SIZE (type
) || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
1884 return targetm
.calls
.pass_by_reference (ca
, mode
, type
, named_arg
);
1887 /* Return true if TYPE, which is passed by reference, should be callee
1888 copied instead of caller copied. */
1891 reference_callee_copied (CUMULATIVE_ARGS
*ca
, enum machine_mode mode
,
1892 tree type
, bool named_arg
)
1894 if (type
&& TREE_ADDRESSABLE (type
))
1896 return targetm
.calls
.callee_copies (ca
, mode
, type
, named_arg
);
1899 /* Structures to communicate between the subroutines of assign_parms.
1900 The first holds data persistent across all parameters, the second
1901 is cleared out for each parameter. */
1903 struct assign_parm_data_all
1905 CUMULATIVE_ARGS args_so_far
;
1906 struct args_size stack_args_size
;
1907 tree function_result_decl
;
1909 rtx first_conversion_insn
;
1910 rtx last_conversion_insn
;
1911 HOST_WIDE_INT pretend_args_size
;
1912 HOST_WIDE_INT extra_pretend_bytes
;
1913 int reg_parm_stack_space
;
1916 struct assign_parm_data_one
1922 enum machine_mode nominal_mode
;
1923 enum machine_mode passed_mode
;
1924 enum machine_mode promoted_mode
;
1925 struct locate_and_pad_arg_data locate
;
1927 BOOL_BITFIELD named_arg
: 1;
1928 BOOL_BITFIELD passed_pointer
: 1;
1929 BOOL_BITFIELD on_stack
: 1;
1930 BOOL_BITFIELD loaded_in_reg
: 1;
1933 /* A subroutine of assign_parms. Initialize ALL. */
1936 assign_parms_initialize_all (struct assign_parm_data_all
*all
)
1940 memset (all
, 0, sizeof (*all
));
1942 fntype
= TREE_TYPE (current_function_decl
);
1944 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1945 INIT_CUMULATIVE_INCOMING_ARGS (all
->args_so_far
, fntype
, NULL_RTX
);
1947 INIT_CUMULATIVE_ARGS (all
->args_so_far
, fntype
, NULL_RTX
,
1948 current_function_decl
, -1);
1951 #ifdef REG_PARM_STACK_SPACE
1952 all
->reg_parm_stack_space
= REG_PARM_STACK_SPACE (current_function_decl
);
1956 /* If ARGS contains entries with complex types, split the entry into two
1957 entries of the component type. Return a new list if substitutions are
1958 needed, else the old list. */
1961 split_complex_args (tree args
)
1965 /* Before allocating memory, check for the common case of no complex. */
1966 for (p
= args
; p
; p
= TREE_CHAIN (p
))
1968 tree type
= TREE_TYPE (p
);
1969 if (TREE_CODE (type
) == COMPLEX_TYPE
1970 && targetm
.calls
.split_complex_arg (type
))
1976 args
= copy_list (args
);
1978 for (p
= args
; p
; p
= TREE_CHAIN (p
))
1980 tree type
= TREE_TYPE (p
);
1981 if (TREE_CODE (type
) == COMPLEX_TYPE
1982 && targetm
.calls
.split_complex_arg (type
))
1985 tree subtype
= TREE_TYPE (type
);
1986 bool addressable
= TREE_ADDRESSABLE (p
);
1988 /* Rewrite the PARM_DECL's type with its component. */
1989 TREE_TYPE (p
) = subtype
;
1990 DECL_ARG_TYPE (p
) = TREE_TYPE (DECL_ARG_TYPE (p
));
1991 DECL_MODE (p
) = VOIDmode
;
1992 DECL_SIZE (p
) = NULL
;
1993 DECL_SIZE_UNIT (p
) = NULL
;
1994 /* If this arg must go in memory, put it in a pseudo here.
1995 We can't allow it to go in memory as per normal parms,
1996 because the usual place might not have the imag part
1997 adjacent to the real part. */
1998 DECL_ARTIFICIAL (p
) = addressable
;
1999 DECL_IGNORED_P (p
) = addressable
;
2000 TREE_ADDRESSABLE (p
) = 0;
2003 /* Build a second synthetic decl. */
2004 decl
= build_decl (PARM_DECL
, NULL_TREE
, subtype
);
2005 DECL_ARG_TYPE (decl
) = DECL_ARG_TYPE (p
);
2006 DECL_ARTIFICIAL (decl
) = addressable
;
2007 DECL_IGNORED_P (decl
) = addressable
;
2008 layout_decl (decl
, 0);
2010 /* Splice it in; skip the new decl. */
2011 TREE_CHAIN (decl
) = TREE_CHAIN (p
);
2012 TREE_CHAIN (p
) = decl
;
2020 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2021 the hidden struct return argument, and (abi willing) complex args.
2022 Return the new parameter list. */
2025 assign_parms_augmented_arg_list (struct assign_parm_data_all
*all
)
2027 tree fndecl
= current_function_decl
;
2028 tree fntype
= TREE_TYPE (fndecl
);
2029 tree fnargs
= DECL_ARGUMENTS (fndecl
);
2031 /* If struct value address is treated as the first argument, make it so. */
2032 if (aggregate_value_p (DECL_RESULT (fndecl
), fndecl
)
2033 && ! cfun
->returns_pcc_struct
2034 && targetm
.calls
.struct_value_rtx (TREE_TYPE (fndecl
), 1) == 0)
2036 tree type
= build_pointer_type (TREE_TYPE (fntype
));
2039 decl
= build_decl (PARM_DECL
, NULL_TREE
, type
);
2040 DECL_ARG_TYPE (decl
) = type
;
2041 DECL_ARTIFICIAL (decl
) = 1;
2042 DECL_IGNORED_P (decl
) = 1;
2044 TREE_CHAIN (decl
) = fnargs
;
2046 all
->function_result_decl
= decl
;
2049 all
->orig_fnargs
= fnargs
;
2051 /* If the target wants to split complex arguments into scalars, do so. */
2052 if (targetm
.calls
.split_complex_arg
)
2053 fnargs
= split_complex_args (fnargs
);
2058 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2059    data for the parameter.  Incorporate ABI specifics such as pass-by-
2060    reference and type promotion.  */
2063 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2064                              struct assign_parm_data_one *data)
2066   tree nominal_type, passed_type;
2067   enum machine_mode nominal_mode, passed_mode, promoted_mode;
2069   memset (data, 0, sizeof (*data));
2071   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
2073     data->named_arg = 1;  /* No variadic parms.  */
2074   else if (TREE_CHAIN (parm))
2075     data->named_arg = 1;  /* Not the last non-variadic parm.  */
2076   else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2077     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2079     data->named_arg = 0;  /* Treat as variadic.  */
2081   nominal_type = TREE_TYPE (parm);
2082   passed_type = DECL_ARG_TYPE (parm);
2084   /* Look out for errors propagating this far.  Also, if the parameter's
2085      type is void then its value doesn't matter.  */
2086   if (TREE_TYPE (parm) == error_mark_node
2087       /* This can happen after weird syntax errors
2088          or if an enum type is defined among the parms.  */
2089       || TREE_CODE (parm) != PARM_DECL
2090       || passed_type == NULL
2091       || VOID_TYPE_P (nominal_type))
2093       nominal_type = passed_type = void_type_node;
2094       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2098   /* Find mode of arg as it is passed, and mode of arg as it should be
2099      during execution of this function.  */
2100   passed_mode = TYPE_MODE (passed_type);
2101   nominal_mode = TYPE_MODE (nominal_type);
2103   /* If the parm is to be passed as a transparent union, use the type of
2104      the first field for the tests below.  We have already verified that
2105      the modes are the same.  */
2106   if (TREE_CODE (passed_type) == UNION_TYPE
2107       && TYPE_TRANSPARENT_UNION (passed_type))
2108     passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2110   /* See if this arg was passed by invisible reference.  */
2111   if (pass_by_reference (&all->args_so_far, passed_mode,
2112                          passed_type, data->named_arg))
2114       passed_type = nominal_type = build_pointer_type (passed_type);
2115       data->passed_pointer = true;
2116       passed_mode = nominal_mode = Pmode;
2119   /* Find mode as it is passed by the ABI.  */
2120   promoted_mode = passed_mode;
2121   if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2123       int unsignedp = TYPE_UNSIGNED (passed_type);
2124       promoted_mode = promote_mode (passed_type, promoted_mode,
2129   data->nominal_type = nominal_type;
2130   data->passed_type = passed_type;
2131   data->nominal_mode = nominal_mode;
2132   data->passed_mode = passed_mode;
2133   data->promoted_mode = promoted_mode;
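/* Illustration only, not part of the file proper: the invisible-reference
   adjustment above, seen at the source level.  The names are hypothetical.
   When pass_by_reference is true for a large aggregate, the parameter is
   handled as a pointer: PASSED_TYPE and NOMINAL_TYPE become pointer types,
   DATA->passed_pointer is set, and both modes become Pmode.  */
#if 0
struct big { char c[256]; };
void g (struct big b);          /* what the user wrote */
void g_by_ref (struct big *b);  /* how this code treats the incoming value */
#endif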
2136 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2139 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2140                             struct assign_parm_data_one *data, bool no_rtl)
2142   int varargs_pretend_bytes = 0;
2144   targetm.calls.setup_incoming_varargs (&all->args_so_far,
2145                                         data->promoted_mode,
2147                                         &varargs_pretend_bytes, no_rtl);
2149   /* If the back-end has requested extra stack space, record how much is
2150      needed.  Do not change pretend_args_size otherwise since it may be
2151      nonzero from an earlier partial argument.  */
2152   if (varargs_pretend_bytes > 0)
2153     all->pretend_args_size = varargs_pretend_bytes;
2156 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2157    the incoming location of the current parameter.  */
2160 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2161                             struct assign_parm_data_one *data)
2163   HOST_WIDE_INT pretend_bytes = 0;
2167   if (data->promoted_mode == VOIDmode)
2169       data->entry_parm = data->stack_parm = const0_rtx;
2173 #ifdef FUNCTION_INCOMING_ARG
2174   entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2175                                       data->passed_type, data->named_arg);
2177   entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2178                              data->passed_type, data->named_arg);
2181   if (entry_parm == 0)
2182     data->promoted_mode = data->passed_mode;
2184   /* Determine parm's home in the stack, in case it arrives in the stack
2185      or we should pretend it did.  Compute the stack position and rtx where
2186      the argument arrives and its size.
2188      There is one complexity here:  If this was a parameter that would
2189      have been passed in registers, but wasn't only because it is
2190      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2191      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2192      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2193      as it was the previous time.  */
2194   in_regs = entry_parm != 0;
2195 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2198   if (!in_regs && !data->named_arg)
2200       if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2203 #ifdef FUNCTION_INCOMING_ARG
2204           tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2205                                        data->passed_type, true);
2207           tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2208                               data->passed_type, true);
2210           in_regs = tem != NULL;
2214   /* If this parameter was passed both in registers and in the stack, use
2215      the copy on the stack.  */
2216   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2224     partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2225                                                data->promoted_mode,
2228   data->partial = partial;
2230   /* The caller might already have allocated stack space for the
2231      register parameters.  */
2232   if (partial != 0 && all->reg_parm_stack_space == 0)
2234       /* Part of this argument is passed in registers and part
2235          is passed on the stack.  Ask the prologue code to extend
2236          the stack part so that we can recreate the full value.
2238          PRETEND_BYTES is the size of the registers we need to store.
2239          CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2240          stack space that the prologue should allocate.
2242          Internally, gcc assumes that the argument pointer is aligned
2243          to STACK_BOUNDARY bits.  This is used both for alignment
2244          optimizations (see init_emit) and to locate arguments that are
2245          aligned to more than PARM_BOUNDARY bits.  We must preserve this
2246          invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2247          a stack boundary.  */
2249       /* We assume at most one partial arg, and it must be the first
2250          argument on the stack.  */
2251       gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2253       pretend_bytes = partial;
2254       all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2256       /* We want to align relative to the actual stack pointer, so
2257          don't include this in the stack size until later.  */
2258       all->extra_pretend_bytes = all->pretend_args_size;
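/* Illustration only, not part of the file proper: how the rounding above
   behaves for assumed example values.  Suppose STACK_BOUNDARY is 64 bits,
   so STACK_BYTES is 8, and 12 bytes of the argument are still in
   registers.  */
#if 0
static void
pretend_args_rounding_example (void)
{
  int pretend_bytes = 12;                       /* register part, in bytes */
  int rounded = CEIL_ROUND (pretend_bytes, 8);  /* (12 + 7) & ~7 == 16 */
  /* The prologue is asked for a 16-byte pretend area, which keeps the
     argument pointer aligned to the stack boundary.  */
  (void) rounded;
}
#endif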
2262   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2263                        entry_parm ? data->partial : 0, current_function_decl,
2264                        &all->stack_args_size, &data->locate);
2266   /* Update parm_stack_boundary if this parameter is passed in the
2268   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2269     crtl->parm_stack_boundary = data->locate.boundary;
2271   /* Adjust offsets to include the pretend args.  */
2272   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2273   data->locate.slot_offset.constant += pretend_bytes;
2274   data->locate.offset.constant += pretend_bytes;
2276   data->entry_parm = entry_parm;
2279 /* A subroutine of assign_parms.  If there is actually space on the stack
2280    for this parm, count it in stack_args_size and return true.  */
2283 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2284                            struct assign_parm_data_one *data)
2286   /* Trivially true if we've no incoming register.  */
2287   if (data->entry_parm == NULL)
2289   /* Also true if we're partially in registers and partially not,
2290      since we've arranged to drop the entire argument on the stack.  */
2291   else if (data->partial != 0)
2293   /* Also true if the target says that it's passed in both registers
2294      and on the stack.  */
2295   else if (GET_CODE (data->entry_parm) == PARALLEL
2296            && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2298   /* Also true if the target says that there's stack allocated for
2299      all register parameters.  */
2300   else if (all->reg_parm_stack_space > 0)
2302   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2306   all->stack_args_size.constant += data->locate.size.constant;
2307   if (data->locate.size.var)
2308     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2313 /* A subroutine of assign_parms.  Given that this parameter is allocated
2314    stack space by the ABI, find it.  */
2317 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2319   rtx offset_rtx, stack_parm;
2320   unsigned int align, boundary;
2322   /* If we're passing this arg using a reg, make its stack home the
2323      aligned stack slot.  */
2324   if (data->entry_parm)
2325     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2327     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2329   stack_parm = crtl->args.internal_arg_pointer;
2330   if (offset_rtx != const0_rtx)
2331     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2332   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2334   set_mem_attributes (stack_parm, parm, 1);
2335   /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2336      while promoted mode's size is needed.  */
2337   if (data->promoted_mode != BLKmode
2338       && data->promoted_mode != DECL_MODE (parm))
2339     set_mem_size (stack_parm, GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
2341   boundary = data->locate.boundary;
2342   align = BITS_PER_UNIT;
2344   /* If we're padding upward, we know that the alignment of the slot
2345      is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2346      intentionally forcing upward padding.  Otherwise we have to come
2347      up with a guess at the alignment based on OFFSET_RTX.  */
2348   if (data->locate.where_pad != downward || data->entry_parm)
2350   else if (GET_CODE (offset_rtx) == CONST_INT)
2352       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2353       align = align & -align;
2355   set_mem_align (stack_parm, align);
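/* Illustration only, not part of the file proper: the alignment guess above
   for assumed example values.  With BITS_PER_UNIT == 8 and a 64-bit slot
   boundary, a constant offset of 12 bytes can only promise 32-bit
   alignment.  */
#if 0
static void
stack_parm_align_guess_example (void)
{
  int boundary = 64;                        /* bits */
  int offset_bytes = 12;
  int align = offset_bytes * 8 | boundary;  /* 96 | 64 == 96 */
  align = align & -align;                   /* lowest set bit: 32 bits */
  (void) align;
}
#endif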
2357   if (data->entry_parm)
2358     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2360   data->stack_parm = stack_parm;
2363 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2364 always valid and contiguous. */
2367 assign_parm_adjust_entry_rtl (struct assign_parm_data_one
*data
)
2369 rtx entry_parm
= data
->entry_parm
;
2370 rtx stack_parm
= data
->stack_parm
;
2372 /* If this parm was passed part in regs and part in memory, pretend it
2373 arrived entirely in memory by pushing the register-part onto the stack.
2374 In the special case of a DImode or DFmode that is split, we could put
2375 it together in a pseudoreg directly, but for now that's not worth
2377 if (data
->partial
!= 0)
2379 /* Handle calls that pass values in multiple non-contiguous
2380 locations. The Irix 6 ABI has examples of this. */
2381 if (GET_CODE (entry_parm
) == PARALLEL
)
2382 emit_group_store (validize_mem (stack_parm
), entry_parm
,
2384 int_size_in_bytes (data
->passed_type
));
2387 gcc_assert (data
->partial
% UNITS_PER_WORD
== 0);
2388 move_block_from_reg (REGNO (entry_parm
), validize_mem (stack_parm
),
2389 data
->partial
/ UNITS_PER_WORD
);
2392 entry_parm
= stack_parm
;
2395 /* If we didn't decide this parm came in a register, by default it came
2397 else if (entry_parm
== NULL
)
2398 entry_parm
= stack_parm
;
2400 /* When an argument is passed in multiple locations, we can't make use
2401 of this information, but we can save some copying if the whole argument
2402 is passed in a single register. */
2403 else if (GET_CODE (entry_parm
) == PARALLEL
2404 && data
->nominal_mode
!= BLKmode
2405 && data
->passed_mode
!= BLKmode
)
2407 size_t i
, len
= XVECLEN (entry_parm
, 0);
2409 for (i
= 0; i
< len
; i
++)
2410 if (XEXP (XVECEXP (entry_parm
, 0, i
), 0) != NULL_RTX
2411 && REG_P (XEXP (XVECEXP (entry_parm
, 0, i
), 0))
2412 && (GET_MODE (XEXP (XVECEXP (entry_parm
, 0, i
), 0))
2413 == data
->passed_mode
)
2414 && INTVAL (XEXP (XVECEXP (entry_parm
, 0, i
), 1)) == 0)
2416 entry_parm
= XEXP (XVECEXP (entry_parm
, 0, i
), 0);
2421 data
->entry_parm
= entry_parm
;
2424 /* A subroutine of assign_parms.  Reconstitute any values which were
2425    passed in multiple registers and would fit in a single register.  */
2428 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2430   rtx entry_parm = data->entry_parm;
2432   /* Convert the PARALLEL to a REG of the same mode as the parallel.
2433      This can be done with register operations rather than on the
2434      stack, even if we will store the reconstituted parameter on the
2436   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2438       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2439       emit_group_store (parmreg, entry_parm, NULL_TREE,
2440                         GET_MODE_SIZE (GET_MODE (entry_parm)));
2441       entry_parm = parmreg;
2444   data->entry_parm = entry_parm;
2447 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2448 always valid and properly aligned. */
2451 assign_parm_adjust_stack_rtl (struct assign_parm_data_one
*data
)
2453 rtx stack_parm
= data
->stack_parm
;
2455 /* If we can't trust the parm stack slot to be aligned enough for its
2456 ultimate type, don't use that slot after entry. We'll make another
2457 stack slot, if we need one. */
2459 && ((STRICT_ALIGNMENT
2460 && GET_MODE_ALIGNMENT (data
->nominal_mode
) > MEM_ALIGN (stack_parm
))
2461 || (data
->nominal_type
2462 && TYPE_ALIGN (data
->nominal_type
) > MEM_ALIGN (stack_parm
)
2463 && MEM_ALIGN (stack_parm
) < PREFERRED_STACK_BOUNDARY
)))
2466 /* If parm was passed in memory, and we need to convert it on entry,
2467 don't store it back in that same slot. */
2468 else if (data
->entry_parm
== stack_parm
2469 && data
->nominal_mode
!= BLKmode
2470 && data
->nominal_mode
!= data
->passed_mode
)
2473 /* If stack protection is in effect for this function, don't leave any
2474 pointers in their passed stack slots. */
2475 else if (crtl
->stack_protect_guard
2476 && (flag_stack_protect
== 2
2477 || data
->passed_pointer
2478 || POINTER_TYPE_P (data
->nominal_type
)))
2481 data
->stack_parm
= stack_parm
;
2484 /* A subroutine of assign_parms.  Return true if the current parameter
2485    should be stored as a BLKmode in the current frame.  */
2488 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2490   if (data->nominal_mode == BLKmode)
2492   if (GET_MODE (data->entry_parm) == BLKmode)
2495 #ifdef BLOCK_REG_PADDING
2496   /* Only assign_parm_setup_block knows how to deal with register arguments
2497      that are padded at the least significant end.  */
2498   if (REG_P (data->entry_parm)
2499       && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2500       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2501           == (BYTES_BIG_ENDIAN ? upward : downward)))
2508 /* A subroutine of assign_parms. Arrange for the parameter to be
2509 present and valid in DATA->STACK_RTL. */
2512 assign_parm_setup_block (struct assign_parm_data_all
*all
,
2513 tree parm
, struct assign_parm_data_one
*data
)
2515 rtx entry_parm
= data
->entry_parm
;
2516 rtx stack_parm
= data
->stack_parm
;
2518 HOST_WIDE_INT size_stored
;
2520 if (GET_CODE (entry_parm
) == PARALLEL
)
2521 entry_parm
= emit_group_move_into_temps (entry_parm
);
2523 size
= int_size_in_bytes (data
->passed_type
);
2524 size_stored
= CEIL_ROUND (size
, UNITS_PER_WORD
);
2525 if (stack_parm
== 0)
2527 DECL_ALIGN (parm
) = MAX (DECL_ALIGN (parm
), BITS_PER_WORD
);
2528 stack_parm
= assign_stack_local (BLKmode
, size_stored
,
2530 if (GET_MODE_SIZE (GET_MODE (entry_parm
)) == size
)
2531 PUT_MODE (stack_parm
, GET_MODE (entry_parm
));
2532 set_mem_attributes (stack_parm
, parm
, 1);
2535 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2536 calls that pass values in multiple non-contiguous locations. */
2537 if (REG_P (entry_parm
) || GET_CODE (entry_parm
) == PARALLEL
)
2541 /* Note that we will be storing an integral number of words.
2542 So we have to be careful to ensure that we allocate an
2543 integral number of words. We do this above when we call
2544 assign_stack_local if space was not allocated in the argument
2545 list. If it was, this will not work if PARM_BOUNDARY is not
2546 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2547 if it becomes a problem. Exception is when BLKmode arrives
2548 with arguments not conforming to word_mode. */
2550 if (data
->stack_parm
== 0)
2552 else if (GET_CODE (entry_parm
) == PARALLEL
)
2555 gcc_assert (!size
|| !(PARM_BOUNDARY
% BITS_PER_WORD
));
2557 mem
= validize_mem (stack_parm
);
2559 /* Handle values in multiple non-contiguous locations. */
2560 if (GET_CODE (entry_parm
) == PARALLEL
)
2562 push_to_sequence2 (all
->first_conversion_insn
,
2563 all
->last_conversion_insn
);
2564 emit_group_store (mem
, entry_parm
, data
->passed_type
, size
);
2565 all
->first_conversion_insn
= get_insns ();
2566 all
->last_conversion_insn
= get_last_insn ();
2573 /* If SIZE is that of a mode no bigger than a word, just use
2574 that mode's store operation. */
2575 else if (size
<= UNITS_PER_WORD
)
2577 enum machine_mode mode
2578 = mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0);
2581 #ifdef BLOCK_REG_PADDING
2582 && (size
== UNITS_PER_WORD
2583 || (BLOCK_REG_PADDING (mode
, data
->passed_type
, 1)
2584 != (BYTES_BIG_ENDIAN
? upward
: downward
)))
2590 /* We are really truncating a word_mode value containing
2591 SIZE bytes into a value of mode MODE. If such an
2592 operation requires no actual instructions, we can refer
2593 to the value directly in mode MODE, otherwise we must
2594 start with the register in word_mode and explicitly
2596 if (TRULY_NOOP_TRUNCATION (size
* BITS_PER_UNIT
, BITS_PER_WORD
))
2597 reg
= gen_rtx_REG (mode
, REGNO (entry_parm
));
2600 reg
= gen_rtx_REG (word_mode
, REGNO (entry_parm
));
2601 reg
= convert_to_mode (mode
, copy_to_reg (reg
), 1);
2603 emit_move_insn (change_address (mem
, mode
, 0), reg
);
2606 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2607 machine must be aligned to the left before storing
2608 to memory. Note that the previous test doesn't
2609 handle all cases (e.g. SIZE == 3). */
2610 else if (size
!= UNITS_PER_WORD
2611 #ifdef BLOCK_REG_PADDING
2612 && (BLOCK_REG_PADDING (mode
, data
->passed_type
, 1)
2620 int by
= (UNITS_PER_WORD
- size
) * BITS_PER_UNIT
;
2621 rtx reg
= gen_rtx_REG (word_mode
, REGNO (entry_parm
));
2623 x
= expand_shift (LSHIFT_EXPR
, word_mode
, reg
,
2624 build_int_cst (NULL_TREE
, by
),
2626 tem
= change_address (mem
, word_mode
, 0);
2627 emit_move_insn (tem
, x
);
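/* Illustration only, not part of the file proper: the left-justification
   above for assumed example values.  On a big-endian target with
   UNITS_PER_WORD == 4, a 3-byte argument arrives in the low bytes of the
   register; shifting it left by (4 - 3) * BITS_PER_UNIT == 8 bits moves it
   to the most significant (lowest addressed) bytes before the whole word
   is stored into the stack slot.  */
#if 0
static void
big_endian_left_justify_example (void)
{
  unsigned int word = 0x00abcdef;  /* 3 meaningful bytes, right-justified */
  int by = (4 - 3) * 8;            /* (UNITS_PER_WORD - size) * BITS_PER_UNIT */
  word <<= by;                     /* 0xabcdef00: bytes now left-justified */
  (void) word;
}
#endif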
2630 move_block_from_reg (REGNO (entry_parm
), mem
,
2631 size_stored
/ UNITS_PER_WORD
);
2634 move_block_from_reg (REGNO (entry_parm
), mem
,
2635 size_stored
/ UNITS_PER_WORD
);
2637 else if (data
->stack_parm
== 0)
2639 push_to_sequence2 (all
->first_conversion_insn
, all
->last_conversion_insn
);
2640 emit_block_move (stack_parm
, data
->entry_parm
, GEN_INT (size
),
2642 all
->first_conversion_insn
= get_insns ();
2643 all
->last_conversion_insn
= get_last_insn ();
2647 data
->stack_parm
= stack_parm
;
2648 SET_DECL_RTL (parm
, stack_parm
);
2651 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2652 parameter. Get it there. Perform all ABI specified conversions. */
2655 assign_parm_setup_reg (struct assign_parm_data_all
*all
, tree parm
,
2656 struct assign_parm_data_one
*data
)
2659 enum machine_mode promoted_nominal_mode
;
2660 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (parm
));
2661 bool did_conversion
= false;
2663 /* Store the parm in a pseudoregister during the function, but we may
2664 need to do it in a wider mode. */
2666 /* This is not really promoting for a call. However we need to be
2667 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2668 promoted_nominal_mode
2669 = promote_mode (data
->nominal_type
, data
->nominal_mode
, &unsignedp
, 1);
2671 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
2673 if (!DECL_ARTIFICIAL (parm
))
2674 mark_user_reg (parmreg
);
2676 /* If this was an item that we received a pointer to,
2677 set DECL_RTL appropriately. */
2678 if (data
->passed_pointer
)
2680 rtx x
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data
->passed_type
)), parmreg
);
2681 set_mem_attributes (x
, parm
, 1);
2682 SET_DECL_RTL (parm
, x
);
2685 SET_DECL_RTL (parm
, parmreg
);
2687 assign_parm_remove_parallels (data
);
2689 /* Copy the value into the register. */
2690 if (data
->nominal_mode
!= data
->passed_mode
2691 || promoted_nominal_mode
!= data
->promoted_mode
)
2695 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2696 mode, by the caller. We now have to convert it to
2697 NOMINAL_MODE, if different. However, PARMREG may be in
2698 a different mode than NOMINAL_MODE if it is being stored
2701 If ENTRY_PARM is a hard register, it might be in a register
2702 not valid for operating in its mode (e.g., an odd-numbered
2703 register for a DFmode). In that case, moves are the only
2704 thing valid, so we can't do a convert from there. This
2705 occurs when the calling sequence allow such misaligned
2708 In addition, the conversion may involve a call, which could
2709 clobber parameters which haven't been copied to pseudo
2710 registers yet. Therefore, we must first copy the parm to
2711 a pseudo reg here, and save the conversion until after all
2712 parameters have been moved. */
2714 rtx tempreg
= gen_reg_rtx (GET_MODE (data
->entry_parm
));
2716 emit_move_insn (tempreg
, validize_mem (data
->entry_parm
));
2718 push_to_sequence2 (all
->first_conversion_insn
, all
->last_conversion_insn
);
2719 tempreg
= convert_to_mode (data
->nominal_mode
, tempreg
, unsignedp
);
2721 if (GET_CODE (tempreg
) == SUBREG
2722 && GET_MODE (tempreg
) == data
->nominal_mode
2723 && REG_P (SUBREG_REG (tempreg
))
2724 && data
->nominal_mode
== data
->passed_mode
2725 && GET_MODE (SUBREG_REG (tempreg
)) == GET_MODE (data
->entry_parm
)
2726 && GET_MODE_SIZE (GET_MODE (tempreg
))
2727 < GET_MODE_SIZE (GET_MODE (data
->entry_parm
)))
2729 /* The argument is already sign/zero extended, so note it
2731 SUBREG_PROMOTED_VAR_P (tempreg
) = 1;
2732 SUBREG_PROMOTED_UNSIGNED_SET (tempreg
, unsignedp
);
2735 /* TREE_USED gets set erroneously during expand_assignment. */
2736 save_tree_used
= TREE_USED (parm
);
2737 expand_assignment (parm
, make_tree (data
->nominal_type
, tempreg
), false);
2738 TREE_USED (parm
) = save_tree_used
;
2739 all
->first_conversion_insn
= get_insns ();
2740 all
->last_conversion_insn
= get_last_insn ();
2743 did_conversion
= true;
2746 emit_move_insn (parmreg
, validize_mem (data
->entry_parm
));
2748 /* If we were passed a pointer but the actual value can safely live
2749 in a register, put it in one. */
2750 if (data
->passed_pointer
2751 && TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
2752 /* If by-reference argument was promoted, demote it. */
2753 && (TYPE_MODE (TREE_TYPE (parm
)) != GET_MODE (DECL_RTL (parm
))
2754 || use_register_for_decl (parm
)))
2756 /* We can't use nominal_mode, because it will have been set to
2757 Pmode above. We must use the actual mode of the parm. */
2758 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
2759 mark_user_reg (parmreg
);
2761 if (GET_MODE (parmreg
) != GET_MODE (DECL_RTL (parm
)))
2763 rtx tempreg
= gen_reg_rtx (GET_MODE (DECL_RTL (parm
)));
2764 int unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (parm
));
2766 push_to_sequence2 (all
->first_conversion_insn
,
2767 all
->last_conversion_insn
);
2768 emit_move_insn (tempreg
, DECL_RTL (parm
));
2769 tempreg
= convert_to_mode (GET_MODE (parmreg
), tempreg
, unsigned_p
);
2770 emit_move_insn (parmreg
, tempreg
);
2771 all
->first_conversion_insn
= get_insns ();
2772 all
->last_conversion_insn
= get_last_insn ();
2775 did_conversion
= true;
2778 emit_move_insn (parmreg
, DECL_RTL (parm
));
2780 SET_DECL_RTL (parm
, parmreg
);
2782 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2784 data
->stack_parm
= NULL
;
2787 /* Mark the register as eliminable if we did no conversion and it was
2788 copied from memory at a fixed offset, and the arg pointer was not
2789 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2790 offset formed an invalid address, such memory-equivalences as we
2791 make here would screw up life analysis for it. */
2792 if (data
->nominal_mode
== data
->passed_mode
2794 && data
->stack_parm
!= 0
2795 && MEM_P (data
->stack_parm
)
2796 && data
->locate
.offset
.var
== 0
2797 && reg_mentioned_p (virtual_incoming_args_rtx
,
2798 XEXP (data
->stack_parm
, 0)))
2800 rtx linsn
= get_last_insn ();
2803 /* Mark complex types separately. */
2804 if (GET_CODE (parmreg
) == CONCAT
)
2806 enum machine_mode submode
2807 = GET_MODE_INNER (GET_MODE (parmreg
));
2808 int regnor
= REGNO (XEXP (parmreg
, 0));
2809 int regnoi
= REGNO (XEXP (parmreg
, 1));
2810 rtx stackr
= adjust_address_nv (data
->stack_parm
, submode
, 0);
2811 rtx stacki
= adjust_address_nv (data
->stack_parm
, submode
,
2812 GET_MODE_SIZE (submode
));
2814 /* Scan backwards for the set of the real and
2816 for (sinsn
= linsn
; sinsn
!= 0;
2817 sinsn
= prev_nonnote_insn (sinsn
))
2819 set
= single_set (sinsn
);
2823 if (SET_DEST (set
) == regno_reg_rtx
[regnoi
])
2824 set_unique_reg_note (sinsn
, REG_EQUIV
, stacki
);
2825 else if (SET_DEST (set
) == regno_reg_rtx
[regnor
])
2826 set_unique_reg_note (sinsn
, REG_EQUIV
, stackr
);
2829 else if ((set
= single_set (linsn
)) != 0
2830 && SET_DEST (set
) == parmreg
)
2831 set_unique_reg_note (linsn
, REG_EQUIV
, data
->stack_parm
);
2834 /* For pointer data type, suggest pointer register. */
2835 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
2836 mark_reg_pointer (parmreg
,
2837 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
))));
2840 /* A subroutine of assign_parms. Allocate stack space to hold the current
2841 parameter. Get it there. Perform all ABI specified conversions. */
2844 assign_parm_setup_stack (struct assign_parm_data_all
*all
, tree parm
,
2845 struct assign_parm_data_one
*data
)
2847 /* Value must be stored in the stack slot STACK_PARM during function
2849 bool to_conversion
= false;
2851 assign_parm_remove_parallels (data
);
2853 if (data
->promoted_mode
!= data
->nominal_mode
)
2855 /* Conversion is required. */
2856 rtx tempreg
= gen_reg_rtx (GET_MODE (data
->entry_parm
));
2858 emit_move_insn (tempreg
, validize_mem (data
->entry_parm
));
2860 push_to_sequence2 (all
->first_conversion_insn
, all
->last_conversion_insn
);
2861 to_conversion
= true;
2863 data
->entry_parm
= convert_to_mode (data
->nominal_mode
, tempreg
,
2864 TYPE_UNSIGNED (TREE_TYPE (parm
)));
2866 if (data
->stack_parm
)
2867 /* ??? This may need a big-endian conversion on sparc64. */
2869 = adjust_address (data
->stack_parm
, data
->nominal_mode
, 0);
2872 if (data
->entry_parm
!= data
->stack_parm
)
2876 if (data
->stack_parm
== 0)
2879 = assign_stack_local (GET_MODE (data
->entry_parm
),
2880 GET_MODE_SIZE (GET_MODE (data
->entry_parm
)),
2881 TYPE_ALIGN (data
->passed_type
));
2882 set_mem_attributes (data
->stack_parm
, parm
, 1);
2885 dest
= validize_mem (data
->stack_parm
);
2886 src
= validize_mem (data
->entry_parm
);
2890 /* Use a block move to handle potentially misaligned entry_parm. */
2892 push_to_sequence2 (all
->first_conversion_insn
,
2893 all
->last_conversion_insn
);
2894 to_conversion
= true;
2896 emit_block_move (dest
, src
,
2897 GEN_INT (int_size_in_bytes (data
->passed_type
)),
2901 emit_move_insn (dest
, src
);
2906 all
->first_conversion_insn
= get_insns ();
2907 all
->last_conversion_insn
= get_last_insn ();
2911 SET_DECL_RTL (parm
, data
->stack_parm
);
2914 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2915 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2918 assign_parms_unsplit_complex (struct assign_parm_data_all
*all
, tree fnargs
)
2921 tree orig_fnargs
= all
->orig_fnargs
;
2923 for (parm
= orig_fnargs
; parm
; parm
= TREE_CHAIN (parm
))
2925 if (TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
2926 && targetm
.calls
.split_complex_arg (TREE_TYPE (parm
)))
2928 rtx tmp
, real
, imag
;
2929 enum machine_mode inner
= GET_MODE_INNER (DECL_MODE (parm
));
2931 real
= DECL_RTL (fnargs
);
2932 imag
= DECL_RTL (TREE_CHAIN (fnargs
));
2933 if (inner
!= GET_MODE (real
))
2935 real
= gen_lowpart_SUBREG (inner
, real
);
2936 imag
= gen_lowpart_SUBREG (inner
, imag
);
2939 if (TREE_ADDRESSABLE (parm
))
2942 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (parm
));
2944 /* split_complex_arg put the real and imag parts in
2945 pseudos. Move them to memory. */
2946 tmp
= assign_stack_local (DECL_MODE (parm
), size
,
2947 TYPE_ALIGN (TREE_TYPE (parm
)));
2948 set_mem_attributes (tmp
, parm
, 1);
2949 rmem
= adjust_address_nv (tmp
, inner
, 0);
2950 imem
= adjust_address_nv (tmp
, inner
, GET_MODE_SIZE (inner
));
2951 push_to_sequence2 (all
->first_conversion_insn
,
2952 all
->last_conversion_insn
);
2953 emit_move_insn (rmem
, real
);
2954 emit_move_insn (imem
, imag
);
2955 all
->first_conversion_insn
= get_insns ();
2956 all
->last_conversion_insn
= get_last_insn ();
2960 tmp
= gen_rtx_CONCAT (DECL_MODE (parm
), real
, imag
);
2961 SET_DECL_RTL (parm
, tmp
);
2963 real
= DECL_INCOMING_RTL (fnargs
);
2964 imag
= DECL_INCOMING_RTL (TREE_CHAIN (fnargs
));
2965 if (inner
!= GET_MODE (real
))
2967 real
= gen_lowpart_SUBREG (inner
, real
);
2968 imag
= gen_lowpart_SUBREG (inner
, imag
);
2970 tmp
= gen_rtx_CONCAT (DECL_MODE (parm
), real
, imag
);
2971 set_decl_incoming_rtl (parm
, tmp
, false);
2972 fnargs
= TREE_CHAIN (fnargs
);
2976 SET_DECL_RTL (parm
, DECL_RTL (fnargs
));
2977 set_decl_incoming_rtl (parm
, DECL_INCOMING_RTL (fnargs
), false);
2979 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2980 instead of the copy of decl, i.e. FNARGS. */
2981 if (DECL_INCOMING_RTL (parm
) && MEM_P (DECL_INCOMING_RTL (parm
)))
2982 set_mem_expr (DECL_INCOMING_RTL (parm
), parm
);
2985 fnargs
= TREE_CHAIN (fnargs
);
2989 /* Assign RTL expressions to the function's parameters. This may involve
2990 copying them into registers and using those registers as the DECL_RTL. */
2993 assign_parms (tree fndecl
)
2995 struct assign_parm_data_all all
;
2998 crtl
->args
.internal_arg_pointer
2999 = targetm
.calls
.internal_arg_pointer ();
3001 assign_parms_initialize_all (&all
);
3002 fnargs
= assign_parms_augmented_arg_list (&all
);
3004 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
3006 struct assign_parm_data_one data
;
3008 /* Extract the type of PARM; adjust it according to ABI. */
3009 assign_parm_find_data_types (&all
, parm
, &data
);
3011 /* Early out for errors and void parameters. */
3012 if (data
.passed_mode
== VOIDmode
)
3014 SET_DECL_RTL (parm
, const0_rtx
);
3015 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
);
3019 /* Estimate stack alignment from parameter alignment. */
3020 if (SUPPORTS_STACK_ALIGNMENT
)
3022 unsigned int align
= FUNCTION_ARG_BOUNDARY (data
.promoted_mode
,
3024 if (TYPE_ALIGN (data
.nominal_type
) > align
)
3025 align
= TYPE_ALIGN (data
.passed_type
);
3026 if (crtl
->stack_alignment_estimated
< align
)
3028 gcc_assert (!crtl
->stack_realign_processed
);
3029 crtl
->stack_alignment_estimated
= align
;
3033 if (cfun
->stdarg
&& !TREE_CHAIN (parm
))
3034 assign_parms_setup_varargs (&all
, &data
, false);
3036 /* Find out where the parameter arrives in this function. */
3037 assign_parm_find_entry_rtl (&all
, &data
);
3039 /* Find out where stack space for this parameter might be. */
3040 if (assign_parm_is_stack_parm (&all
, &data
))
3042 assign_parm_find_stack_rtl (parm
, &data
);
3043 assign_parm_adjust_entry_rtl (&data
);
3046 /* Record permanently how this parm was passed. */
3047 set_decl_incoming_rtl (parm
, data
.entry_parm
, data
.passed_pointer
);
3049 /* Update info on where next arg arrives in registers. */
3050 FUNCTION_ARG_ADVANCE (all
.args_so_far
, data
.promoted_mode
,
3051 data
.passed_type
, data
.named_arg
);
3053 assign_parm_adjust_stack_rtl (&data
);
3055 if (assign_parm_setup_block_p (&data
))
3056 assign_parm_setup_block (&all
, parm
, &data
);
3057 else if (data
.passed_pointer
|| use_register_for_decl (parm
))
3058 assign_parm_setup_reg (&all
, parm
, &data
);
3060 assign_parm_setup_stack (&all
, parm
, &data
);
3063 if (targetm
.calls
.split_complex_arg
&& fnargs
!= all
.orig_fnargs
)
3064 assign_parms_unsplit_complex (&all
, fnargs
);
3066 /* Output all parameter conversion instructions (possibly including calls)
3067 now that all parameters have been copied out of hard registers. */
3068 emit_insn (all
.first_conversion_insn
);
3070 /* Estimate reload stack alignment from scalar return mode. */
3071 if (SUPPORTS_STACK_ALIGNMENT
)
3073 if (DECL_RESULT (fndecl
))
3075 tree type
= TREE_TYPE (DECL_RESULT (fndecl
));
3076 enum machine_mode mode
= TYPE_MODE (type
);
3080 && !AGGREGATE_TYPE_P (type
))
3082 unsigned int align
= GET_MODE_ALIGNMENT (mode
);
3083 if (crtl
->stack_alignment_estimated
< align
)
3085 gcc_assert (!crtl
->stack_realign_processed
);
3086 crtl
->stack_alignment_estimated
= align
;
3092 /* If we are receiving a struct value address as the first argument, set up
3093 the RTL for the function result. As this might require code to convert
3094 the transmitted address to Pmode, we do this here to ensure that possible
3095 preliminary conversions of the address have been emitted already. */
3096 if (all
.function_result_decl
)
3098 tree result
= DECL_RESULT (current_function_decl
);
3099 rtx addr
= DECL_RTL (all
.function_result_decl
);
3102 if (DECL_BY_REFERENCE (result
))
3106 addr
= convert_memory_address (Pmode
, addr
);
3107 x
= gen_rtx_MEM (DECL_MODE (result
), addr
);
3108 set_mem_attributes (x
, result
, 1);
3110 SET_DECL_RTL (result
, x
);
3113 /* We have aligned all the args, so add space for the pretend args. */
3114 crtl
->args
.pretend_args_size
= all
.pretend_args_size
;
3115 all
.stack_args_size
.constant
+= all
.extra_pretend_bytes
;
3116 crtl
->args
.size
= all
.stack_args_size
.constant
;
3118 /* Adjust function incoming argument size for alignment and
3121 #ifdef REG_PARM_STACK_SPACE
3122 crtl
->args
.size
= MAX (crtl
->args
.size
,
3123 REG_PARM_STACK_SPACE (fndecl
));
3126 crtl
->args
.size
= CEIL_ROUND (crtl
->args
.size
,
3127 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3129 #ifdef ARGS_GROW_DOWNWARD
3130 crtl
->args
.arg_offset_rtx
3131 = (all
.stack_args_size
.var
== 0 ? GEN_INT (-all
.stack_args_size
.constant
)
3132 : expand_expr (size_diffop (all
.stack_args_size
.var
,
3133 size_int (-all
.stack_args_size
.constant
)),
3134 NULL_RTX
, VOIDmode
, 0));
3136 crtl
->args
.arg_offset_rtx
= ARGS_SIZE_RTX (all
.stack_args_size
);
3139 /* See how many bytes, if any, of its args a function should try to pop
3142 crtl
->args
.pops_args
= RETURN_POPS_ARGS (fndecl
, TREE_TYPE (fndecl
),
3145 /* For stdarg.h function, save info about
3146 regs and stack space used by the named args. */
3148 crtl
->args
.info
= all
.args_so_far
;
3150 /* Set the rtx used for the function return value. Put this in its
3151 own variable so any optimizers that need this information don't have
3152 to include tree.h. Do this here so it gets done when an inlined
3153 function gets output. */
3156 = (DECL_RTL_SET_P (DECL_RESULT (fndecl
))
3157 ? DECL_RTL (DECL_RESULT (fndecl
)) : NULL_RTX
);
3159 /* If scalar return value was computed in a pseudo-reg, or was a named
3160 return value that got dumped to the stack, copy that to the hard
3162 if (DECL_RTL_SET_P (DECL_RESULT (fndecl
)))
3164 tree decl_result
= DECL_RESULT (fndecl
);
3165 rtx decl_rtl
= DECL_RTL (decl_result
);
3167 if (REG_P (decl_rtl
)
3168 ? REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
3169 : DECL_REGISTER (decl_result
))
3173 real_decl_rtl
= targetm
.calls
.function_value (TREE_TYPE (decl_result
),
3175 REG_FUNCTION_VALUE_P (real_decl_rtl
) = 1;
3176 /* The delay slot scheduler assumes that crtl->return_rtx
3177 holds the hard register containing the return value, not a
3178 temporary pseudo. */
3179 crtl
->return_rtx
= real_decl_rtl
;
3184 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3185 For all seen types, gimplify their sizes. */
3188 gimplify_parm_type (tree
*tp
, int *walk_subtrees
, void *data
)
3195 if (POINTER_TYPE_P (t
))
3197 else if (TYPE_SIZE (t
) && !TREE_CONSTANT (TYPE_SIZE (t
))
3198 && !TYPE_SIZES_GIMPLIFIED (t
))
3200 gimplify_type_sizes (t
, (gimple_seq
*) data
);
3208 /* Gimplify the parameter list for current_function_decl. This involves
3209 evaluating SAVE_EXPRs of variable sized parameters and generating code
3210 to implement callee-copies reference parameters. Returns a sequence of
3211 statements to add to the beginning of the function. */
3214 gimplify_parameters (void)
3216 struct assign_parm_data_all all
;
3218 gimple_seq stmts
= NULL
;
3220 assign_parms_initialize_all (&all
);
3221 fnargs
= assign_parms_augmented_arg_list (&all
);
3223 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
3225 struct assign_parm_data_one data
;
3227 /* Extract the type of PARM; adjust it according to ABI. */
3228 assign_parm_find_data_types (&all
, parm
, &data
);
3230 /* Early out for errors and void parameters. */
3231 if (data
.passed_mode
== VOIDmode
|| DECL_SIZE (parm
) == NULL
)
3234 /* Update info on where next arg arrives in registers. */
3235 FUNCTION_ARG_ADVANCE (all
.args_so_far
, data
.promoted_mode
,
3236 data
.passed_type
, data
.named_arg
);
3238 /* ??? Once upon a time variable_size stuffed parameter list
3239 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3240 turned out to be less than manageable in the gimple world.
3241 Now we have to hunt them down ourselves. */
3242 walk_tree_without_duplicates (&data
.passed_type
,
3243 gimplify_parm_type
, &stmts
);
3245 if (TREE_CODE (DECL_SIZE_UNIT (parm
)) != INTEGER_CST
)
3247 gimplify_one_sizepos (&DECL_SIZE (parm
), &stmts
);
3248 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm
), &stmts
);
3251 if (data
.passed_pointer
)
3253 tree type
= TREE_TYPE (data
.passed_type
);
3254 if (reference_callee_copied (&all
.args_so_far
, TYPE_MODE (type
),
3255 type
, data
.named_arg
))
3259 /* For constant-sized objects, this is trivial; for
3260 variable-sized objects, we have to play games. */
3261 if (TREE_CODE (DECL_SIZE_UNIT (parm
)) == INTEGER_CST
3262 && !(flag_stack_check
== GENERIC_STACK_CHECK
3263 && compare_tree_int (DECL_SIZE_UNIT (parm
),
3264 STACK_CHECK_MAX_VAR_SIZE
) > 0))
3266 local
= create_tmp_var (type
, get_name (parm
));
3267 DECL_IGNORED_P (local
) = 0;
3271 tree ptr_type
, addr
;
3273 ptr_type
= build_pointer_type (type
);
3274 addr
= create_tmp_var (ptr_type
, get_name (parm
));
3275 DECL_IGNORED_P (addr
) = 0;
3276 local
= build_fold_indirect_ref (addr
);
3278 t
= built_in_decls
[BUILT_IN_ALLOCA
];
3279 t
= build_call_expr (t
, 1, DECL_SIZE_UNIT (parm
));
3280 t
= fold_convert (ptr_type
, t
);
3281 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
3282 gimplify_and_add (t
, &stmts
);
3285 gimplify_assign (local
, parm
, &stmts
);
3287 SET_DECL_VALUE_EXPR (parm
, local
);
3288 DECL_HAS_VALUE_EXPR_P (parm
) = 1;
3296 /* Compute the size and offset from the start of the stacked arguments for a
3297 parm passed in mode PASSED_MODE and with type TYPE.
3299 INITIAL_OFFSET_PTR points to the current offset into the stacked
3302 The starting offset and size for this parm are returned in
3303 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3304 nonzero, the offset is that of stack slot, which is returned in
3305 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3306 padding required from the initial offset ptr to the stack slot.
3308 IN_REGS is nonzero if the argument will be passed in registers. It will
3309 never be set if REG_PARM_STACK_SPACE is not defined.
3311 FNDECL is the function in which the argument was defined.
3313 There are two types of rounding that are done. The first, controlled by
3314 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3315 list to be aligned to the specific boundary (in bits). This rounding
3316 affects the initial and starting offsets, but not the argument size.
3318 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3319 optionally rounds the size of the parm to PARM_BOUNDARY. The
3320 initial offset is not affected by this rounding, while the size always
3321 is and the starting offset may be. */
3323 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3324 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3325 callers pass in the total size of args so far as
3326 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3329 locate_and_pad_parm (enum machine_mode passed_mode
, tree type
, int in_regs
,
3330 int partial
, tree fndecl ATTRIBUTE_UNUSED
,
3331 struct args_size
*initial_offset_ptr
,
3332 struct locate_and_pad_arg_data
*locate
)
3335 enum direction where_pad
;
3336 unsigned int boundary
;
3337 int reg_parm_stack_space
= 0;
3338 int part_size_in_regs
;
3340 #ifdef REG_PARM_STACK_SPACE
3341 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
3343 /* If we have found a stack parm before we reach the end of the
3344 area reserved for registers, skip that area. */
3347 if (reg_parm_stack_space
> 0)
3349 if (initial_offset_ptr
->var
)
3351 initial_offset_ptr
->var
3352 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
3353 ssize_int (reg_parm_stack_space
));
3354 initial_offset_ptr
->constant
= 0;
3356 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
3357 initial_offset_ptr
->constant
= reg_parm_stack_space
;
3360 #endif /* REG_PARM_STACK_SPACE */
3362 part_size_in_regs
= (reg_parm_stack_space
== 0 ? partial
: 0);
3365 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
3366 where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
3367 boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
3368 locate
->where_pad
= where_pad
;
3370 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3371 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
3372 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
3374 locate
->boundary
= boundary
;
3376 if (SUPPORTS_STACK_ALIGNMENT
)
3378 /* stack_alignment_estimated can't change after stack has been
3380 if (crtl
->stack_alignment_estimated
< boundary
)
3382 if (!crtl
->stack_realign_processed
)
3383 crtl
->stack_alignment_estimated
= boundary
;
3386 /* If stack is realigned and stack alignment value
3387 hasn't been finalized, it is OK not to increase
3388 stack_alignment_estimated. The bigger alignment
3389 requirement is recorded in stack_alignment_needed
3391 gcc_assert (!crtl
->stack_realign_finalized
3392 && crtl
->stack_realign_needed
);
3397 /* Remember if the outgoing parameter requires extra alignment on the
3398 calling function side. */
3399 if (crtl
->stack_alignment_needed
< boundary
)
3400 crtl
->stack_alignment_needed
= boundary
;
3401 if (crtl
->max_used_stack_slot_alignment
< crtl
->stack_alignment_needed
)
3402 crtl
->max_used_stack_slot_alignment
= crtl
->stack_alignment_needed
;
3403 if (crtl
->preferred_stack_boundary
< boundary
)
3404 crtl
->preferred_stack_boundary
= boundary
;
3406 #ifdef ARGS_GROW_DOWNWARD
3407 locate
->slot_offset
.constant
= -initial_offset_ptr
->constant
;
3408 if (initial_offset_ptr
->var
)
3409 locate
->slot_offset
.var
= size_binop (MINUS_EXPR
, ssize_int (0),
3410 initial_offset_ptr
->var
);
3414 if (where_pad
!= none
3415 && (!host_integerp (sizetree
, 1)
3416 || (tree_low_cst (sizetree
, 1) * BITS_PER_UNIT
) % PARM_BOUNDARY
))
3417 s2
= round_up (s2
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3418 SUB_PARM_SIZE (locate
->slot_offset
, s2
);
3421 locate
->slot_offset
.constant
+= part_size_in_regs
;
3424 #ifdef REG_PARM_STACK_SPACE
3425 || REG_PARM_STACK_SPACE (fndecl
) > 0
3428 pad_to_arg_alignment (&locate
->slot_offset
, boundary
,
3429 &locate
->alignment_pad
);
3431 locate
->size
.constant
= (-initial_offset_ptr
->constant
3432 - locate
->slot_offset
.constant
);
3433 if (initial_offset_ptr
->var
)
3434 locate
->size
.var
= size_binop (MINUS_EXPR
,
3435 size_binop (MINUS_EXPR
,
3437 initial_offset_ptr
->var
),
3438 locate
->slot_offset
.var
);
3440 /* Pad_below needs the pre-rounded size to know how much to pad
3442 locate
->offset
= locate
->slot_offset
;
3443 if (where_pad
== downward
)
3444 pad_below (&locate
->offset
, passed_mode
, sizetree
);
3446 #else /* !ARGS_GROW_DOWNWARD */
3448 #ifdef REG_PARM_STACK_SPACE
3449 || REG_PARM_STACK_SPACE (fndecl
) > 0
3452 pad_to_arg_alignment (initial_offset_ptr
, boundary
,
3453 &locate
->alignment_pad
);
3454 locate
->slot_offset
= *initial_offset_ptr
;
3456 #ifdef PUSH_ROUNDING
3457 if (passed_mode
!= BLKmode
)
3458 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
3461 /* Pad_below needs the pre-rounded size to know how much to pad below
3462 so this must be done before rounding up. */
3463 locate
->offset
= locate
->slot_offset
;
3464 if (where_pad
== downward
)
3465 pad_below (&locate
->offset
, passed_mode
, sizetree
);
3467 if (where_pad
!= none
3468 && (!host_integerp (sizetree
, 1)
3469 || (tree_low_cst (sizetree
, 1) * BITS_PER_UNIT
) % PARM_BOUNDARY
))
3470 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
3472 ADD_PARM_SIZE (locate
->size
, sizetree
);
3474 locate
->size
.constant
-= part_size_in_regs
;
3475 #endif /* ARGS_GROW_DOWNWARD */
3478 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3479    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
3482 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3483                       struct args_size *alignment_pad)
3485   tree save_var = NULL_TREE;
3486   HOST_WIDE_INT save_constant = 0;
3487   int boundary_in_bytes = boundary / BITS_PER_UNIT;
3488   HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3490 #ifdef SPARC_STACK_BOUNDARY_HACK
3491   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3492      the real alignment of %sp.  However, when it does this, the
3493      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
3494   if (SPARC_STACK_BOUNDARY_HACK)
3498   if (boundary > PARM_BOUNDARY)
3500       save_var = offset_ptr->var;
3501       save_constant = offset_ptr->constant;
3504   alignment_pad->var = NULL_TREE;
3505   alignment_pad->constant = 0;
3507   if (boundary > BITS_PER_UNIT)
3509       if (offset_ptr->var)
3511           tree sp_offset_tree = ssize_int (sp_offset);
3512           tree offset = size_binop (PLUS_EXPR,
3513                                     ARGS_SIZE_TREE (*offset_ptr),
3515 #ifdef ARGS_GROW_DOWNWARD
3516           tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3518           tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3521           offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3522           /* ARGS_SIZE_TREE includes constant term.  */
3523           offset_ptr->constant = 0;
3524           if (boundary > PARM_BOUNDARY)
3525             alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3530           offset_ptr->constant = -sp_offset +
3531 #ifdef ARGS_GROW_DOWNWARD
3532             FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3534             CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3536           if (boundary > PARM_BOUNDARY)
3537             alignment_pad->constant = offset_ptr->constant - save_constant;
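/* Illustration only, not part of the file proper: the constant-offset
   rounding above for assumed example values.  Take STACK_POINTER_OFFSET to
   be 0, a boundary of 64 bits (8 bytes) and a running offset of 20 bytes.  */
#if 0
static void
pad_to_arg_alignment_example (void)
{
  int up = CEIL_ROUND (20, 8);     /* 24: used when args grow upward */
  int down = FLOOR_ROUND (20, 8);  /* 16: used under ARGS_GROW_DOWNWARD */
  (void) up; (void) down;
}
#endif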
3543 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3545   if (passed_mode != BLKmode)
3547       if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3548         offset_ptr->constant
3549           += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3550                / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3551               - GET_MODE_SIZE (passed_mode));
3555       if (TREE_CODE (sizetree) != INTEGER_CST
3556           || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3558           /* Round the size up to multiple of PARM_BOUNDARY bits.  */
3559           tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3561           ADD_PARM_SIZE (*offset_ptr, s2);
3562           SUB_PARM_SIZE (*offset_ptr, sizetree);
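/* Illustration only, not part of the file proper: the scalar branch of the
   padding arithmetic above for assumed example values.  Take
   PARM_BOUNDARY == 32 and a 16-bit (2-byte) mode: the slot rounds up to
   4 bytes and the value sits in its upper part, so the offset advances by
   4 - 2 == 2 bytes of padding below the value.  */
#if 0
static void
pad_below_example (void)
{
  int bitsize = 16, parm_boundary = 32, bits_per_unit = 8;
  int slot_bytes = (bitsize + parm_boundary - 1) / parm_boundary
                   * parm_boundary / bits_per_unit;   /* == 4 */
  int pad = slot_bytes - bitsize / bits_per_unit;     /* == 2 */
  (void) pad;
}
#endif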
3568 /* True if register REGNO was alive at a place where `setjmp' was
3569    called and was set more than once or is an argument.  Such regs may
3570    be clobbered by `longjmp'.  */
3573 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3575   /* There appear to be cases where some local vars never reach the
3576      backend but have bogus regnos.  */
3577   if (regno >= max_reg_num ())
3580   return ((REG_N_SETS (regno) > 1
3581            || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3582           && REGNO_REG_SET_P (setjmp_crosses, regno));
3585 /* Walk the tree of blocks describing the binding levels within a
3586    function and warn about variables that might be killed by setjmp or
3587    vfork.  This is done after calling flow_analysis before register
3588    allocation since that will clobber the pseudo-regs to hard
3592 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3596   for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3598     if (TREE_CODE (decl) == VAR_DECL
3599         && DECL_RTL_SET_P (decl)
3600         && REG_P (DECL_RTL (decl))
3601         && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3602       warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3603                " %<longjmp%> or %<vfork%>", decl);
3606   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3607     setjmp_vars_warning (setjmp_crosses, sub);
3610 /* Do the appropriate part of setjmp_vars_warning
3611    but for arguments instead of local variables.  */
3614 setjmp_args_warning (bitmap setjmp_crosses)
3617   for (decl = DECL_ARGUMENTS (current_function_decl);
3618        decl; decl = TREE_CHAIN (decl))
3619     if (DECL_RTL (decl) != 0
3620         && REG_P (DECL_RTL (decl))
3621         && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3622       warning (OPT_Wclobbered,
3623                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3627 /* Generate warning messages for variables live across setjmp.  */
3630 generate_setjmp_warnings (void)
3632   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3634   if (n_basic_blocks == NUM_FIXED_BLOCKS
3635       || bitmap_empty_p (setjmp_crosses))
3638   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3639   setjmp_args_warning (setjmp_crosses);
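/* Illustration only, not part of the file proper: the kind of user code the
   -Wclobbered warning above is about.  A non-volatile local that may be
   kept in a register, is modified after setjmp, and is read again once
   longjmp returns has an indeterminate value at that point.  */
#if 0
#include <setjmp.h>
static jmp_buf env;
static int
clobber_example (void)
{
  int n = 0;                    /* register candidate, set more than once */
  if (setjmp (env) == 0)
    {
      n = 1;
      longjmp (env, 1);
    }
  return n;                     /* indeterminate: may be 0 or 1 */
}
#endif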
3643 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3644 and create duplicate blocks. */
3645 /* ??? Need an option to either create block fragments or to create
3646 abstract origin duplicates of a source block. It really depends
3647 on what optimization has been performed. */
3650 reorder_blocks (void)
3652 tree block
= DECL_INITIAL (current_function_decl
);
3653 VEC(tree
,heap
) *block_stack
;
3655 if (block
== NULL_TREE
)
3658 block_stack
= VEC_alloc (tree
, heap
, 10);
3660 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3661 clear_block_marks (block
);
3663 /* Prune the old trees away, so that they don't get in the way. */
3664 BLOCK_SUBBLOCKS (block
) = NULL_TREE
;
3665 BLOCK_CHAIN (block
) = NULL_TREE
;
3667 /* Recreate the block tree from the note nesting. */
3668 reorder_blocks_1 (get_insns (), block
, &block_stack
);
3669 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
3671 VEC_free (tree
, heap
, block_stack
);
3674 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3677 clear_block_marks (tree block
)
3681 TREE_ASM_WRITTEN (block
) = 0;
3682 clear_block_marks (BLOCK_SUBBLOCKS (block
));
3683 block
= BLOCK_CHAIN (block
);
3688 reorder_blocks_1 (rtx insns
, tree current_block
, VEC(tree
,heap
) **p_block_stack
)
3692 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
3696 if (NOTE_KIND (insn
) == NOTE_INSN_BLOCK_BEG
)
3698 tree block
= NOTE_BLOCK (insn
);
3701 origin
= (BLOCK_FRAGMENT_ORIGIN (block
)
3702 ? BLOCK_FRAGMENT_ORIGIN (block
)
3705 /* If we have seen this block before, that means it now
3706 spans multiple address regions. Create a new fragment. */
3707 if (TREE_ASM_WRITTEN (block
))
3709 tree new_block
= copy_node (block
);
3711 BLOCK_FRAGMENT_ORIGIN (new_block
) = origin
;
3712 BLOCK_FRAGMENT_CHAIN (new_block
)
3713 = BLOCK_FRAGMENT_CHAIN (origin
);
3714 BLOCK_FRAGMENT_CHAIN (origin
) = new_block
;
3716 NOTE_BLOCK (insn
) = new_block
;
3720 BLOCK_SUBBLOCKS (block
) = 0;
3721 TREE_ASM_WRITTEN (block
) = 1;
3722 /* When there's only one block for the entire function,
3723 current_block == block and we mustn't do this, it
3724 will cause infinite recursion. */
3725 if (block
!= current_block
)
3727 if (block
!= origin
)
3728 gcc_assert (BLOCK_SUPERCONTEXT (origin
) == current_block
);
3730 BLOCK_SUPERCONTEXT (block
) = current_block
;
3731 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
3732 BLOCK_SUBBLOCKS (current_block
) = block
;
3733 current_block
= origin
;
3735 VEC_safe_push (tree
, heap
, *p_block_stack
, block
);
3737 else if (NOTE_KIND (insn
) == NOTE_INSN_BLOCK_END
)
3739 NOTE_BLOCK (insn
) = VEC_pop (tree
, *p_block_stack
);
3740 BLOCK_SUBBLOCKS (current_block
)
3741 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
3742 current_block
= BLOCK_SUPERCONTEXT (current_block
);
3748 /* Reverse the order of elements in the chain T of blocks,
3749    and return the new head of the chain (old last element).  */
3752 blocks_nreverse (tree t)
3754   tree prev = 0, decl, next;
3755   for (decl = t; decl; decl = next)
3757       next = BLOCK_CHAIN (decl);
3758       BLOCK_CHAIN (decl) = prev;
3764 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3765 non-NULL, list them all into VECTOR, in a depth-first preorder
3766 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3770 all_blocks (tree block
, tree
*vector
)
3776 TREE_ASM_WRITTEN (block
) = 0;
3778 /* Record this block. */
3780 vector
[n_blocks
] = block
;
3784 /* Record the subblocks, and their subblocks... */
3785 n_blocks
+= all_blocks (BLOCK_SUBBLOCKS (block
),
3786 vector
? vector
+ n_blocks
: 0);
3787 block
= BLOCK_CHAIN (block
);
3793 /* Return a vector containing all the blocks rooted at BLOCK. The
3794 number of elements in the vector is stored in N_BLOCKS_P. The
3795 vector is dynamically allocated; it is the caller's responsibility
3796 to call `free' on the pointer returned. */
3799 get_block_vector (tree block
, int *n_blocks_p
)
3803 *n_blocks_p
= all_blocks (block
, NULL
);
3804 block_vector
= XNEWVEC (tree
, *n_blocks_p
);
3805 all_blocks (block
, block_vector
);
3807 return block_vector
;
3810 static GTY(()) int next_block_index
= 2;
3812 /* Set BLOCK_NUMBER for all the blocks in FN. */
3815 number_blocks (tree fn
)
3821 /* For SDB and XCOFF debugging output, we start numbering the blocks
3822 from 1 within each function, rather than keeping a running
3824 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3825 if (write_symbols
== SDB_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
3826 next_block_index
= 1;
3829 block_vector
= get_block_vector (DECL_INITIAL (fn
), &n_blocks
);
3831 /* The top-level BLOCK isn't numbered at all. */
3832 for (i
= 1; i
< n_blocks
; ++i
)
3833 /* We number the blocks from two. */
3834 BLOCK_NUMBER (block_vector
[i
]) = next_block_index
++;
3836 free (block_vector
);
3841 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3844 debug_find_var_in_block_tree (tree var
, tree block
)
3848 for (t
= BLOCK_VARS (block
); t
; t
= TREE_CHAIN (t
))
3852 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= TREE_CHAIN (t
))
3854 tree ret
= debug_find_var_in_block_tree (var
, t
);
3862 /* Keep track of whether we're in a dummy function context. If we are,
3863 we don't want to invoke the set_current_function hook, because we'll
3864 get into trouble if the hook calls target_reinit () recursively or
3865 when the initial initialization is not yet complete. */
3867 static bool in_dummy_function
;
3869 /* Invoke the target hook when setting cfun. Update the optimization options
3870 if the function uses different options than the default. */
3873 invoke_set_current_function_hook (tree fndecl
)
3875 if (!in_dummy_function
)
3877 tree opts
= ((fndecl
)
3878 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
)
3879 : optimization_default_node
);
3882 opts
= optimization_default_node
;
3884 /* Change optimization options if needed. */
3885 if (optimization_current_node
!= opts
)
3887 optimization_current_node
= opts
;
3888 cl_optimization_restore (TREE_OPTIMIZATION (opts
));
3891 targetm
.set_current_function (fndecl
);
3895 /* cfun should never be set directly; use this function. */
3898 set_cfun (struct function
*new_cfun
)
3900 if (cfun
!= new_cfun
)
3903 invoke_set_current_function_hook (new_cfun
? new_cfun
->decl
: NULL_TREE
);
3907 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3909 static VEC(function_p
,heap
) *cfun_stack
;
3911 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3914 push_cfun (struct function
*new_cfun
)
3916 VEC_safe_push (function_p
, heap
, cfun_stack
, cfun
);
3917 set_cfun (new_cfun
);
3920 /* Pop cfun from the stack. */
3925 struct function
*new_cfun
= VEC_pop (function_p
, cfun_stack
);
3926 set_cfun (new_cfun
);
3929 /* Return value of funcdef and increase it. */
3931 get_next_funcdef_no (void)
3933 return funcdef_no
++;
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = GGC_CNEW (struct function);

  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();

      result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          cfun->returns_pcc_struct = 1;
#endif
          cfun->returns_struct = 1;
        }

      cfun->stdarg
        = (fntype
           && TYPE_ARG_TYPES (fntype) != 0
           && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
               != void_type_node));

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }

  invoke_set_current_function_hook (fndecl);
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  allocate_struct_function (fndecl, false);
}
/* Reset cfun and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);
  prepare_function_start ();

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Make sure all values used by the optimization passes have sane
   defaults.  */

unsigned int
init_function_for_compilation (void)
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  Make sure that these vectors are
     empty.  */
  gcc_assert (VEC_length (int, prologue) == 0);
  gcc_assert (VEC_length (int, epilogue) == 0);
  gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
  return 0;
}

struct rtl_opt_pass pass_init_function =
{
 {
  RTL_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  init_function_for_compilation,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
void
expand_main_function (void)
{
#if (defined(INVOKE__main) \
     || (!defined(HAS_INIT_SECTION) \
         && !defined(INIT_SECTION_ASM_OP) \
         && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
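
/* Illustrative note (not part of the original sources): on targets that take
   the path above, the emitted call makes the generated "main" begin roughly
   like

     int main (...) { __main (); ... user code ... }

   where __main is the libgcc entry point that runs global constructors on
   systems lacking an init section.  */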
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     crtl->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
        {
          emit_insn (insn);
          return;
        }
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  /* Avoid expand_expr here, because we don't want guard_decl pulled
     into registers unless absolutely necessary.  And we know that
     crtl->stack_protect_guard is a local stack slot, so this skips
     all the fluff.  */
  x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
  y = validize_mem (DECL_RTL (guard_decl));

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
        {
          emit_insn (tmp);
          break;
        }
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (tmp)
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
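
/* Illustrative note (not from the original sources): at the source level the
   check emitted above behaves roughly like

     if (local_guard_slot != global_guard)
       __stack_chk_fail ();

   just before the function returns, where local_guard_slot stands for the
   stack copy written by stack_protect_prologue and global_guard for the
   target's guard value.  */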
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();
  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (sv)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, sv);
            }
        }
      if (value_address)
        {
          rtx x = value_address;
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
            {
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
              set_mem_attributes (x, DECL_RESULT (subr), 1);
            }
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
         into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
          && targetm.calls.return_in_msb (return_type))
        /* expand_function_end will insert the appropriate padding in
           this case.  Use the return value's natural (unpadded) mode
           within the function proper.  */
        SET_DECL_RTL (DECL_RESULT (subr),
                      gen_reg_rtx (TYPE_MODE (return_type)));
      else
        {
          /* In order to figure out what mode to use for the pseudo, we
             figure out what the mode of the eventual return register will
             actually be, and use that.  */
          rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

          /* Structures that are returned in registers are not
             aggregate_value_p, so we may see a PARALLEL or a REG.  */
          if (REG_P (hard_reg))
            SET_DECL_RTL (DECL_RESULT (subr),
                          gen_reg_rtx (GET_MODE (hard_reg)));
          else
            {
              gcc_assert (GET_CODE (hard_reg) == PARALLEL);
              SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
            }
        }

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
         result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local = gen_reg_rtx (Pmode);

      set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      emit_move_insn (local, static_chain_incoming_rtx);
    }
  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
         before the frame variable gets declared.  Help out...  */
      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      if (!DECL_RTL_SET_P (var))
        expand_decl (var);

      t_save = build4 (ARRAY_REF, ptr_type_node,
                       cfun->nonlocal_goto_save_area,
                       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }
  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the stack checking
     probe should go.  */
  if (flag_stack_check)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
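
/* Illustrative pairing (not from the original sources): code that needs a
   temporary RTL context during global initialization typically does

     init_dummy_function_start ();
     ... generate throw-away sequences, probe insn patterns, etc. ...
     expand_dummy_function_end ();

   so that cfun and the emit machinery are valid in between.  */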
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        do_clobber_return_reg (decl_rtl, NULL);
    }
}
static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
        && !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
static GTY(()) rtx initial_trampoline;
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (CALL_P (insn))
          {
            start_sequence ();
            probe_stack_range (STACK_OLD_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = get_insns ();
            end_sequence ();
            emit_insn_before (seq, stack_check_probe_note);
            break;
          }
    }
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  set_curr_insn_source_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (USING_SJLJ_EXCEPTIONS)
    {
      /* Let except.c know where it should emit the call to unregister
         the function context for sjlj exceptions.  */
      if (flag_exceptions)
        sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
         moved into the epilogue by scheduling, because we don't
         always emit unwind information for the epilogue.  */
      if (flag_non_call_exceptions)
        emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();
  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl = crtl->return_rtx;

          /* This should be set in assign_parms.  */
          gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

          /* If this is a BLKmode structure being returned in registers,
             then use the mode computed in expand_return.  Note that if
             decl_rtl is memory, then its mode may have been changed,
             but that crtl->return_rtx has not.  */
          if (GET_MODE (real_decl_rtl) == BLKmode)
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

          /* If a non-BLKmode return value should be padded at the least
             significant end of the register, shift it left by the appropriate
             amount.  BLKmode results are handled using the group load/store
             machinery.  */
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
            {
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
                                           REGNO (real_decl_rtl)),
                              decl_rtl);
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
            }
          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             coercion.  */
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

              if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
                promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
                              &unsignedp, 1);

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
            {
              /* If expand_function_start has created a PARALLEL for decl_rtl,
                 move the result to the real return registers.  Otherwise, do
                 a group load from decl_rtl for a named return.  */
              if (GET_CODE (decl_rtl) == PARALLEL)
                emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
                                 TREE_TYPE (decl_result),
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
            }
          /* In the case of complex integer modes smaller than a word, we'll
             need to generate some non-trivial bitfield insertions.  Do that
             on a pseudo and not the hard register.  */
          else if (GET_CODE (decl_rtl) == CONCAT
                   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
                   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
            {
              int old_generating_concat_p;
              rtx tmp;

              old_generating_concat_p = generating_concat_p;
              generating_concat_p = 0;
              tmp = gen_reg_rtx (GET_MODE (decl_rtl));
              generating_concat_p = old_generating_concat_p;

              emit_move_insn (tmp, decl_rtl);
              emit_move_insn (real_decl_rtl, tmp);
            }
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
        }
    }
  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
        type = TREE_TYPE (type);
      else
        value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
                                               current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
                                              value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
         of the result).  */
      crtl->return_rtx = outgoing;
    }
  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    expand_naked_return ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      start_sequence ();
      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      emit_move_insn (validize_mem (ret),
                      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();
    }

  return ret;
}
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, VEC(int,heap) **vecp)
{
  rtx tmp;

  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
}
/* Set the locator of the insn chain starting at INSN to LOC.  */

static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
        INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (const_rtx insn, VEC(int,heap) **vec)
{
  int i, j;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
              == VEC_index (int, *vec, j))
            count++;
      return count;
    }
  else
    {
      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
        if (INSN_UID (insn) == VEC_index (int, *vec, j))
          return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, &prologue))
    return 1;
  if (contains (insn, &epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (const_rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, &sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
}
#endif /* HAVE_return */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

static void
thread_prologue_and_epilogue_insns (void)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif
  edge_iterator ei;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
         if the profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
        emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      emit_note (NOTE_INSN_PROLOGUE_END);

#ifndef PROFILE_BEFORE_PROLOGUE
      /* Ensure that instructions are not moved into the prologue when
         profiling is on.  The call to the profiling routine can be
         emitted within the live range of a call-clobbered register.  */
      if (crtl->profile)
        emit_insn (gen_blockage ());
#endif

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* Can't deal with multiple successors of the entry block
         at the moment.  Function should always have at least one
         fallthru edge.  */
      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));

      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
      inserted = 1;
    }
#endif
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR);
#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
         then by definition we don't need a full epilogue.  Examine
         the block that falls through to EXIT.  If it does not
         contain any code, examine its predecessors and try to
         emit (conditional) return instructions.  */
      basic_block last;
      rtx label;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (BB_HEAD (last) == label && LABEL_P (label))
        {
          edge_iterator ei2;

          for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
            {
              basic_block bb = e->src;
              rtx jump;

              if (bb == ENTRY_BLOCK_PTR)
                {
                  ei_next (&ei2);
                  continue;
                }

              jump = BB_END (bb);
              if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
                {
                  ei_next (&ei2);
                  continue;
                }

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb);
                  delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  if (! redirect_jump (jump, 0, 0))
                    {
                      ei_next (&ei2);
                      continue;
                    }

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (single_succ_p (bb))
                    {
                      ei_next (&ei2);
                      continue;
                    }
                }
              else
                {
                  ei_next (&ei2);
                  continue;
                }

              /* Fix up the CFG for the successful change we just made.  */
              redirect_edge_succ (e, EXIT_BLOCK_PTR);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (BB_END (last));
          emit_return_into_block (last);
          epilogue_end = BB_END (last);
          single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
          goto epilogue_done;
        }
    }
#endif
  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;
      if (! next_active_insn (BB_END (e->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
epilogue_done:
  default_rtl_profile ();

  if (inserted)
    commit_edge_insertions ();
  /* The epilogue insns we inserted may cause the exit edge to no longer
     be fallthru.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      if (((e->flags & EDGE_FALLTHRU) != 0)
          && returnjump_p (BB_END (e->src)))
        e->flags &= ~EDGE_FALLTHRU;
    }
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn))
        {
          ei_next (&ei);
          continue;
        }

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
         avoid getting rid of sibcall epilogue insns.  Do this before we
         actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VEC_length (int, prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, &prologue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the prologue-end note if we haven't already, and
             move it to just after the last prologue insn.  */
          if (note == 0)
            {
              for (note = last; (note = NEXT_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }
  if ((len = VEC_length (int, epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                note = insn;
            }
          else if (contains (insn, &epilogue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the epilogue-begin note if we haven't already, and
             move it to just before the first epilogue insn.  */
          if (note == 0)
            {
              for (note = insn; (note = PREV_INSN (note));)
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
                  break;
            }

          if (PREV_INSN (last) != note)
            reorder_insns (note, note, PREV_INSN (last));
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of the current function.  */

const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}
/* Returns the raw (mangled) name of the current function.  */

const char *
current_function_assembler_name (void)
{
  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}
/* Given a type, insert it into the used hash table in cfun.  */

void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    t = TREE_TYPE (t);
  t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    used_types_insert_helper (t, cfun);
}
struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);
  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */

  thread_prologue_and_epilogue_insns ();
  return 0;
}
struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     asm ("": "+mr" (inout));

   which is transformed very early to use explicit output and match operands:

     asm ("": "=mr" (inout) : "0" (inout));

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
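
/* Illustrative source-level example (not part of the original comment): a
   function such as

     int twiddle (int x)
     {
       asm ("" : "+mr" (x));
       return x;
     }

   exercises exactly this in-out ("+mr") case; after SSA the asm's output and
   input become distinct names, and the code below re-links them with an
   explicit move so reload can still honor the "m" alternative.  */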
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change in once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);
      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */

      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);

      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}
struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


#include "gt-function.h"