/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
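/* Worked example (illustrative, not from the original source): with
   ALIGN == 8, a power of two,
     CEIL_ROUND (37, 8)   == (37 + 7) & ~7 == 40
     FLOOR_ROUND (-37, 8) == -37 & ~7      == -40
   so both macros round correctly without using division, which is not
   well defined for negative VALUEs.  */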
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;
/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;
/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;
/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;
/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);
/* The currently compiled function.  */
struct function *cfun = 0;
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;
/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
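/* Illustrative sketch of the nesting-level discipline described above
   (hypothetical caller, not part of this file):

       push_temp_slots ();                    enter a new level
       t = assign_stack_temp (DImode, 8, 0);
       ... emit RTL that stores into t ...
       preserve_temp_slots (result);          keep the slot holding RESULT
       pop_temp_slots ();                     frees the rest of this level

   preserve_temp_slots works by pretending the slot was allocated at
   temp_slot_level - 1, exactly as described above.  */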
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void put_reg_into_stack (struct function *, rtx, tree, enum machine_mode,
                                unsigned int, bool, bool, bool, htab_t);
static void schedule_fixup_var_refs (struct function *, rtx, tree, enum machine_mode,
                                     htab_t);
static void fixup_var_refs (rtx, enum machine_mode, int, rtx, htab_t);
static struct fixup_replacement
  *find_fixup_replacement (struct fixup_replacement **, rtx);
static void fixup_var_refs_insns (rtx, rtx, enum machine_mode, int, int, rtx);
static void fixup_var_refs_insns_with_hash (htab_t, rtx, enum machine_mode, int, rtx);
static void fixup_var_refs_insn (rtx, rtx, enum machine_mode, int, int, rtx);
static void fixup_var_refs_1 (rtx, enum machine_mode, rtx *, rtx,
                              struct fixup_replacement **, rtx);
static rtx fixup_memory_subreg (rtx, rtx, enum machine_mode, int);
static rtx walk_fixup_memory_subreg (rtx, rtx, enum machine_mode, int);
static rtx fixup_stack_1 (rtx, rtx);
static void optimize_bit_field (rtx, rtx, rtx *);
static void instantiate_decls (tree, int);
static void instantiate_decls_1 (tree, int);
static void instantiate_decl (rtx, HOST_WIDE_INT, int);
static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
static int instantiate_virtual_regs_1 (rtx *, rtx, int);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static tree *identify_blocks_1 (rtx, tree *, tree *, tree *);
static void reorder_blocks_1 (rtx, tree, varray_type *);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
static int contains (rtx, varray_type);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
static void put_addressof_into_stack (rtx, htab_t);
static bool purge_addressof_1 (rtx *, rtx, int, int, int, htab_t);
static void purge_single_hard_subreg_set (rtx);
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static int is_addressof (rtx *, void *);
static hashval_t insns_for_mem_hash (const void *);
static int insns_for_mem_comp (const void *, const void *);
static int insns_for_mem_walk (rtx *, void *);
static void compute_insns_for_mem (rtx, rtx, htab_t);
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void instantiate_virtual_regs_lossage (rtx);
static tree split_complex_args (tree);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* List of insns that were postponed by purge_addressof_1.  */
static rtx postponed_insns;
/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context)
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}
void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  lang_hooks.function.leave_nested (p);

  /* Finish doing put_var_into_stack for any of our variables which became
     addressable during the nested function.  If only one entry has to be
     fixed up, just do that one.  Otherwise, first make a list of MEMs that
     are not to be unshared.  */
  if (p->fixup_var_refs_queue == 0)
    ;
  else if (p->fixup_var_refs_queue->next == 0)
    fixup_var_refs (p->fixup_var_refs_queue->modified,
                    p->fixup_var_refs_queue->promoted_mode,
                    p->fixup_var_refs_queue->unsignedp,
                    p->fixup_var_refs_queue->modified, 0);
  else
    {
      rtx list = 0;

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);

      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
        fixup_var_refs (queue->modified, queue->promoted_mode,
                        queue->unsignedp, list, 0);
    }

  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
  f->stmt = NULL;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->computed_goto_common_label = NULL;
  f->computed_goto_common_reg = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
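/* Illustrative usage of the ALIGN conventions documented above
   (hypothetical calls, not part of this file):

     rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
       -- aligned according to SImode
     rtx b = assign_stack_local (BLKmode, 64, -1);
       -- aligned to BIGGEST_ALIGNMENT, 64 rounded to a multiple of it
     rtx c = assign_stack_local (BLKmode, 3, -2);
       -- byte (BITS_PER_UNIT) alignment only

   Each call returns a MEM addressed off virtual_stack_vars_rtx, or off
   the frame pointer once virtuals_instantiated is nonzero.  */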
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
                            tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = avail_temp_slots; p; p = p->next)
    {
      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
          && objects_must_conflict_p (p->type, type)
          && (best_p == 0 || best_p->size > p->size
              || (best_p->size == p->size && best_p->align > p->align)))
        {
          if (p->align == align && p->size == size)
            {
              selected = p;
              cut_slot_from_list (selected, &avail_temp_slots);
              best_p = 0;
              break;
            }
          best_p = p;
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        align = BIGGEST_ALIGNMENT;
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 1;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
                                 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
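/* Illustrative usage (hypothetical call, not part of this file):

     rtx t = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   With KEEP == 0 the slot is returned to avail_temp_slots by the next
   free_temp_slots ().  assign_stack_temp_for_type additionally records
   a type, which lets the slot carry alias-set, volatility and
   structure-member information and governs reuse through
   objects_must_conflict_p.  */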
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("%Jsize of variable '%D' is too large", decl, decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
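/* Illustrative usage (hypothetical calls, not part of this file):

     rtx r = assign_temp (integer_type_node, 0, 0, 0);
       -- scalar mode, memory not required: normally a pseudo register
     rtx m = assign_temp (type_of_some_struct, 1, 1, 0);
       -- BLKmode or MEMORY_REQUIRED: an addressable stack temporary

   type_of_some_struct stands for any aggregate tree type; it is only
   a placeholder here.  */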
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
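/* Worked example for the adjacency test above (illustrative numbers,
   not from the original source): two freed BLKmode slots
   P { base_offset = 16, full_size = 8 } and
   Q { base_offset = 24, full_size = 8 } satisfy
   P->base_offset + P->full_size == Q->base_offset, so Q is merged into
   P, leaving one slot { base_offset = 16, full_size = 16 } that a
   later 16-byte temporary can reuse.  */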
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (GET_CODE (new) == REG)
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
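/* Illustrative trace (hypothetical rtxes, not from the original
   source): suppose OLD is (plus R1 R2) where R1 addresses a temp slot,
   and NEW is a register R3 now holding that sum.  No slot matches OLD
   itself, so the code above recurses on (R1, R3) and (R2, R3); the
   call on R1 finds the slot and records R3 as an alias for its
   address.  */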
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (rtx x)
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     the upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      move_slot_to_level (p, MIN (p->level, temp_slot_level));
      p->rtl_expr = NULL_TREE;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep && p->rtl_expr == 0)
        make_slot_available (p);
    }

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (tree t)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (p->rtl_expr == t)
        {
          /* If this slot is below the current TEMP_SLOT_LEVEL, then it
             needs to be preserved.  This can happen if a temporary in
             the RTL_EXPR was addressed; preserve_temp_slots will move
             the temporary into a higher level.  */
          if (temp_slot_level <= p->level)
            make_slot_available (p);
          else
            p->rtl_expr = NULL_TREE;
        }
    }

  combine_temp_slots ();
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (p->rtl_expr == 0)
        make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}
1348 init_temp_slots (void)
1350 /* We have not allocated any temporaries yet. */
1351 avail_temp_slots
= 0;
1352 used_temp_slots
= 0;
1353 temp_slot_level
= 0;
1354 var_temp_slot_level
= 0;
1355 target_temp_slot_level
= 0;
/* Retroactively move an auto variable from a register to a stack
   slot.  This is done when an address-reference to the variable is
   seen.  If RESCAN is true, all previously emitted instructions are
   examined and modified to handle the fact that DECL is now
   addressable.  */

void
put_var_into_stack (tree decl, int rescan)
{
  rtx orig_reg, reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  bool can_use_addressof_p;
  bool volatile_p = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  bool used_p = (TREE_USED (decl)
                 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  orig_reg = reg = (TREE_CODE (decl) == SAVE_EXPR
                    ? SAVE_EXPR_RTL (decl)
                    : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
        break;

  /* If this is a variable-sized object or a structure passed by invisible
     reference, with a pseudo to address it, put that pseudo into the stack
     if the var is non-local.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      orig_reg = reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* If this variable lives in the current function and we don't need to put it
     in the stack for the sake of setjmp or the non-locality, try to keep it in
     a register until we know we actually need the address.  */
  can_use_addressof_p
    = (function == 0
       && ! (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl))
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof_p
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      if (can_use_addressof_p)
        gen_mem_addressof (reg, decl, rescan);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl), decl_mode,
                            0, volatile_p, used_p, false, 0);

      /* If this was previously a MEM but we've removed the ADDRESSOF,
         set this address into that MEM so we always use the same
         rtx for this variable.  */
      if (orig_reg != reg && GET_CODE (orig_reg) == MEM)
        XEXP (orig_reg, 0) = XEXP (reg, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.
         We fixup references to the parts only after we fixup references
         to the whole CONCAT, lest we do double fixups for the latter
         case.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = lang_hooks.types.type_for_mode (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          0, volatile_p, false, false, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          0, volatile_p, false, true, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
                          0, volatile_p, false, false, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
                          0, volatile_p, false, true, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
         already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
        SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
        SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
         Use the lower parts address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (used_p && rescan)
        {
          schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
                                   promoted_mode, 0);
          schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
          schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
        }
    }
  else
    return;
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   TYPE is the user-level data type of the value held in the register.
   DECL_MODE is the machine mode of the user-level data type.
   ORIGINAL_REGNO must be set if the real regno is not visible in REG.
   VOLATILE_P is true if this is for a "volatile" decl.
   USED_P is true if this reg might have already been used in an insn.
   CONSECUTIVE_P is true if the stack slot assigned to reg must be
   consecutive with the previous stack slot.  */

static void
put_reg_into_stack (struct function *function, rtx reg, tree type,
                    enum machine_mode decl_mode, unsigned int original_regno,
                    bool volatile_p, bool used_p, bool consecutive_p,
                    htab_t ht)
{
  struct function *func = function ? function : cfun;
  enum machine_mode mode = GET_MODE (reg);
  unsigned int regno = original_regno;
  rtx new = 0;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    {
      if (!func->x_parm_reg_stack_loc)
        abort ();
      new = func->x_parm_reg_stack_loc[regno];
    }

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode),
                                consecutive_p ? -2 : 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
                           AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (struct function *function, rtx reg, tree type,
                         enum machine_mode promoted_mode, htab_t ht)
{
  int unsigned_p = type ? TYPE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp = ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}
static void
fixup_var_refs (rtx var, enum machine_mode promoted_mode, int unsignedp,
                rtx may_share, htab_t ht)
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;
  int save_volatile_ok = volatile_ok;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
        abort ();

      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
                                      may_share);
      return;
    }

  /* Volatile is valid in MEMs because all we're doing is changing the
     address inside.  */
  volatile_ok = 1;
  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
                        stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
                            stack->next != 0, may_share);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                may_share);
          end_sequence ();
        }
    }

  volatile_ok = save_volatile_ok;
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (struct fixup_replacement **replacements, rtx x)
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR and fix them
   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
   for the current function.  MAY_SHARE is either a MEM that is not
   to be unshared or a list of them.  */

static void
fixup_var_refs_insns (rtx insn, rtx var, enum machine_mode promoted_mode,
                      int unsignedp, int toplevel, rtx may_share)
{
  while (insn)
    {
      /* fixup_var_refs_insn might modify insn, so save its next
         pointer now.  */
      rtx next = NEXT_INSN (insn);

      /* CALL_PLACEHOLDERs are special; we have to switch into each of
         the three sequences they (potentially) contain, and process
         them recursively.  The CALL_INSN itself is not interesting.  */

      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int i;

          /* Look at the Normal call, sibling call and tail recursion
             sequences attached to the CALL_PLACEHOLDER.  */
          for (i = 0; i < 3; i++)
            {
              rtx seq = XEXP (PATTERN (insn), i);
              if (seq)
                {
                  push_to_sequence (seq);
                  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
                                        may_share);
                  XEXP (PATTERN (insn), i) = get_insns ();
                  end_sequence ();
                }
            }
        }

      else if (INSN_P (insn))
        fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
                             may_share);

      insn = next;
    }
}
/* Look up the insns which reference VAR in HT and fix them up.  Other
   arguments are the same as fixup_var_refs_insns.

   N.B. No need for special processing of CALL_PLACEHOLDERs here,
   because the hash table will point straight to the interesting insn
   (inside the CALL_PLACEHOLDER).  */

static void
fixup_var_refs_insns_with_hash (htab_t ht, rtx var, enum machine_mode promoted_mode,
                                int unsignedp, rtx may_share)
{
  struct insns_for_mem_entry tmp;
  struct insns_for_mem_entry *ime;
  rtx insn_list;

  tmp.key = var;
  ime = htab_find (ht, &tmp);
  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
    if (INSN_P (XEXP (insn_list, 0)))
      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
                           unsignedp, 1, may_share);
}
/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */

static void
fixup_var_refs_insn (rtx insn, rtx var, enum machine_mode promoted_mode,
                     int unsignedp, int toplevel, rtx no_share)
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
          || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
              && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
        /* The REG_LIBCALL note will go away since we are going to
           turn INSN into a NOTE, so just delete the
           corresponding REG_RETVAL note.  */
        remove_note (XEXP (note, 0),
                     find_reg_note (XEXP (note, 0), REG_RETVAL,
                                    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
           && (set = single_set (insn)) != 0
           && SET_DEST (set) == var
           /* If this represents the result of an insn group,
              don't delete the insn.  */
           && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
           && (rtx_equal_p (SET_SRC (set), var)
               || (GET_CODE (SET_SRC (set)) == REG
                   && (prev = prev_nonnote_insn (insn)) != 0
                   && (prev_set = single_set (prev)) != 0
                   && SET_DEST (prev_set) == SET_SRC (set)
                   && rtx_equal_p (SET_SRC (prev_set), var))))
    delete_insn (insn);
  else
    {
      struct fixup_replacement *replacements = 0;
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
        {
          /* If the insn that copies the results of a CALL_INSN
             into a pseudo now references VAR, we have to use an
             intermediate pseudo since we want the life of the
             return value register to be only a single insn.

             If we don't use an intermediate pseudo, such things as
             address computations to make the address of VAR valid
             if it is not can be placed between the CALL_INSN and INSN.

             To make sure this doesn't happen, we record the destination
             of the CALL_INSN and see if the next insn uses both that
             and VAR.  */

          if (call_dest != 0 && GET_CODE (insn) == INSN
              && reg_mentioned_p (var, PATTERN (insn))
              && reg_mentioned_p (call_dest, PATTERN (insn)))
            {
              rtx temp = gen_reg_rtx (GET_MODE (call_dest));

              emit_insn_before (gen_move_insn (temp, call_dest), insn);

              PATTERN (insn) = replace_rtx (PATTERN (insn),
                                            call_dest, temp);
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == SET)
            call_dest = SET_DEST (PATTERN (insn));
          else if (GET_CODE (insn) == CALL_INSN
                   && GET_CODE (PATTERN (insn)) == PARALLEL
                   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
            call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
          else
            call_dest = 0;
        }

      /* See if we have to do anything to INSN now that VAR is in
         memory.  If it needs to be loaded into a pseudo, use a single
         pseudo for the entire insn in case there is a MATCH_DUP
         between two operands.  We pass a pointer to the head of
         a list of struct fixup_replacements.  If fixup_var_refs_1
         needs to allocate pseudos or replacement MEMs (for SUBREGs),
         it will record them in this list.

         If it allocated a pseudo for any replacement, we copy into
         it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                        &replacements, no_share);

      /* If this is last_parm_insn, and any instructions were output
         after it to fix it up, then we must set last_parm_insn to
         the last such instruction emitted.  */
      if (insn == last_parm_insn)
        last_parm_insn = PREV_INSN (next_insn);

      while (replacements)
        {
          struct fixup_replacement *next;

          if (GET_CODE (replacements->new) == REG)
            {
              rtx insert_before;
              rtx seq;

              /* OLD might be a (subreg (mem)).  */
              if (GET_CODE (replacements->old) == SUBREG)
                replacements->old
                  = fixup_memory_subreg (replacements->old, insn,
                                         promoted_mode, 0);
              else
                replacements->old
                  = fixup_stack_1 (replacements->old, insn);

              insert_before = insn;

              /* If we are changing the mode, do a conversion.
                 This might be wasteful, but combine.c will
                 eliminate much of the waste.  */

              if (GET_MODE (replacements->new)
                  != GET_MODE (replacements->old))
                {
                  start_sequence ();
                  convert_move (replacements->new,
                                replacements->old, unsignedp);
                  seq = get_insns ();
                  end_sequence ();
                }
              else
                seq = gen_move_insn (replacements->new,
                                     replacements->old);

              emit_insn_before (seq, insert_before);
            }

          next = replacements->next;
          free (replacements);
          replacements = next;
        }
    }

  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
     But don't touch other insns referred to by reg-notes;
     we will get them elsewhere.  */
  while (note)
    {
      if (GET_CODE (note) != INSN_LIST)
        XEXP (note, 0)
          = walk_fixup_memory_subreg (XEXP (note, 0), insn,
                                      promoted_mode, 1);
      note = XEXP (note, 1);
    }
}
1923 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1924 See if the rtx expression at *LOC in INSN needs to be changed.
1926 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1927 contain a list of original rtx's and replacements. If we find that we need
1928 to modify this insn by replacing a memory reference with a pseudo or by
1929 making a new MEM to implement a SUBREG, we consult that list to see if
1930 we have already chosen a replacement. If none has already been allocated,
1931 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1932 or the SUBREG, as appropriate, to the pseudo. */
static void
fixup_var_refs_1 (rtx var, enum machine_mode promoted_mode, rtx *loc, rtx insn,
		  struct fixup_replacement **replacements, rtx no_share)
{
  int i;
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = gen_reg_rtx (GET_MODE (sub));
	      rtx seq, new_insn;

	      /* We should be able to replace with a register or all is lost.
		 Note that we can't use validate_change to verify this, since
		 we're not caring for replacing all dups simultaneously.  */
	      if (! validate_replace_rtx (*loc, y, insn))
		abort ();

	      /* Careful!  First try to recognize a direct move of the
		 value, mimicking how things are done in gen_reload wrt
		 PLUS.  Consider what happens when insn is a conditional
		 move instruction and addsi3 clobbers flags.  */

	      start_sequence ();
	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
	      seq = get_insns ();
	      end_sequence ();

	      if (recog_memoized (new_insn) < 0)
		{
		  /* That failed.  Fall back on force_operand and hope.  */

		  start_sequence ();
		  sub = force_operand (sub, y);
		  if (sub != y)
		    emit_insn (gen_move_insn (y, sub));
		  seq = get_insns ();
		  end_sequence ();
		}

#ifdef HAVE_cc0
	      /* Don't separate setter from user.  */
	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
		insn = PREV_INSN (insn);
#endif

	      emit_insn_before (seq, insn);
	    }
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, no_share);

	  *loc = x = replacement->new;
	  code = GET_CODE (x);
	}
      break;
    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;

		  /* The following code works only if we have a MEM, so we
		     need to handle the subreg here.  We directly substitute
		     it assuming that a subreg must be OK here.  We already
		     scheduled a replacement to copy the mem into the
		     register.  */
		  XEXP (x, 0) = tem;
		  return;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  enum machine_mode new_mode
		    = mode_for_extraction (EP_extzv, 1);
		  if (new_mode != MAX_MACHINE_MODE)
		    wanted_mode = new_mode;
		}
	      else if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  enum machine_mode new_mode
		    = mode_for_extraction (EP_extv, 1);
		  if (new_mode != MAX_MACHINE_MODE)
		    wanted_mode = new_mode;
		}

	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = adjust_address_nv (tem, wanted_mode, offset);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;
    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
				no_share);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (promoted_mode);
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      enum machine_mode mode = GET_MODE (x);
	      *loc = replacement->new;

	      /* Careful!  We may have just replaced a SUBREG by a MEM, which
		 means that the insn may have become invalid again.  We can't
		 in this case make a new replacement since we already have one
		 and we must deal with MATCH_DUPs.  */
	      if (GET_CODE (replacement->new) == MEM)
		{
		  INSN_CODE (insn) = -1;
		  if (recog_memoized (insn) >= 0)
		    return;

		  fixup_var_refs_1 (replacement->new, mode, &PATTERN (insn),
				    insn, replacements, no_share);
		}

	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn,
							 promoted_mode, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;
    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}
      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
	rtx outerdest = dest;

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = SUBREG_REG (src);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
	    && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements, no_share);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements, no_share);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements, no_share);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode;
		enum machine_mode is_mode = GET_MODE (tem);
		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));

		wanted_mode = mode_for_extraction (EP_insv, 0);

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    /* If the bytes and bits are counted differently, we
		       must adjust the offset.  */
		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		      offset = (GET_MODE_SIZE (is_mode)
				- GET_MODE_SIZE (wanted_mode) - offset);

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = adjust_address_nv (tem, wanted_mode, offset);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat, last;

	    if (GET_CODE (SET_SRC (x)) == SUBREG
		&& (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
		    > GET_MODE_SIZE (GET_MODE (var))))
	      {
		/* This (subreg VAR) is now a paradoxical subreg.  We need
		   to replace VAR instead of the subreg.  */
		replacement = find_fixup_replacement (replacements, var);
		if (replacement->new == NULL_RTX)
		  replacement->new = gen_reg_rtx (GET_MODE (var));
		SUBREG_REG (SET_SRC (x)) = replacement->new;
	      }
	    else
	      {
		replacement = find_fixup_replacement (replacements,
						      SET_SRC (x));
		if (replacement->new)
		  SET_SRC (x) = replacement->new;
		else if (GET_CODE (SET_SRC (x)) == SUBREG)
		  SET_SRC (x) = replacement->new
		    = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
					   0);
		else
		  SET_SRC (x) = replacement->new
		    = fixup_stack_1 (SET_SRC (x), insn);
	      }

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (NEXT_INSN (pat) != NULL_RTX)
	      {
		last = emit_insn_before (pat, insn);

		/* INSN might have REG_RETVAL or other important notes, so
		   we need to store the pattern of the last insn in the
		   sequence into INSN similarly to the normal case.  LAST
		   should not have REG_NOTES, but we allow them if INSN has
		   no REG_NOTES.  */
		if (REG_NOTES (last) && REG_NOTES (insn))
		  abort ();
		if (REG_NOTES (last))
		  REG_NOTES (insn) = REG_NOTES (last);
		PATTERN (insn) = PATTERN (last);

		delete_insn (last);
	      }
	    else
	      PATTERN (insn) = PATTERN (pat);

	    return;
	  }
	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat, last;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
						  promoted_mode, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (NEXT_INSN (pat) != NULL_RTX)
	      {
		last = emit_insn_before (pat, insn);

		/* INSN might have REG_RETVAL or other important notes, so
		   we need to store the pattern of the last insn in the
		   sequence into INSN similarly to the normal case.  LAST
		   should not have REG_NOTES, but we allow them if INSN has
		   no REG_NOTES.  */
		if (REG_NOTES (last) && REG_NOTES (insn))
		  abort ();
		if (REG_NOTES (last))
		  REG_NOTES (insn) = REG_NOTES (last);
		PATTERN (insn) = PATTERN (last);

		delete_insn (last);
	      }
	    else
	      PATTERN (insn) = PATTERN (pat);

	    return;
	  }
	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);
	    enum machine_mode temp_mode;

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      {
		fixeddest = fixup_memory_subreg (fixeddest, insn,
						 promoted_mode, 0);
		temp_mode = GET_MODE (fixeddest);
	      }
	    else
	      {
		fixeddest = fixup_stack_1 (fixeddest, insn);
		temp_mode = promoted_mode;
	      }

	    temp = gen_reg_rtx (temp_mode);

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
      break;

    default:
      break;
    }
  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
			  no_share);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements, no_share);
	}
    }
}
/* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
   The REG was placed on the stack, so X now has the form (SUBREG:m1
   (MEM:PROMOTED_MODE ...)).

   Return an rtx (MEM:m1 newaddr) which is equivalent.  If any insns
   must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside REG_NOTES.  */

static rtx
fixup_memory_subreg (rtx x, rtx insn, enum machine_mode promoted_mode,
		     int uncritical)
{
  HOST_WIDE_INT offset;
  rtx mem = SUBREG_REG (x);
  rtx addr = XEXP (mem, 0);
  enum machine_mode mode = GET_MODE (x);
  rtx result, seq;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
    abort ();

  offset = SUBREG_BYTE (x);
  if (BYTES_BIG_ENDIAN)
    /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
       the offset so that it points to the right location within the
       MEM.  */
    offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));

  if (!flag_force_addr
      && memory_address_p (mode, plus_constant (addr, offset)))
    /* Shortcut if no insns need be emitted.  */
    return adjust_address (mem, mode, offset);

  start_sequence ();
  result = adjust_address (mem, mode, offset);
  seq = get_insns ();
  end_sequence ();

  emit_insn_before (seq, insn);
  return result;
}
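
/* For instance, with hypothetical operands: if X is
   (subreg:QI (mem:SI (reg fp)) 3), the result is the equivalent
   (mem:QI (plus (reg fp) (const_int 3))); on big-endian targets the
   byte offset is first shifted, as above, when PROMOTED_MODE is wider
   than the mode of the MEM.  */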
/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   INSN, PROMOTED_MODE and UNCRITICAL are as for
   fixup_memory_subreg.  */

static rtx
walk_fixup_memory_subreg (rtx x, rtx insn, enum machine_mode promoted_mode,
			  int uncritical)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, promoted_mode, uncritical);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
						promoted_mode, uncritical);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j)
	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
					  promoted_mode, uncritical);
	}
    }
  return x;
}
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (rtx x, rtx insn)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (code == MEM)
    {
      rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
#endif
	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;

	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = get_insns ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return replace_equiv_address (x, temp);
	}
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
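
/* A hypothetical example: on a target whose load displacement field is
   limited to 12 bits, (mem:SI (plus (reg fp) (const_int 8192))) fails
   memory_address_p, so the code above emits

	(set (reg:SI 70) (plus (reg fp) (const_int 8192)))

   before INSN and rewrites the reference as (mem:SI (reg:SI 70)).
   The register number is invented for illustration.  */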
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (rtx body, rtx insn, rtx *equiv_mem)
{
  rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
	  rtx insns;

	  /* Adjust OFFSET to count bits from low-address byte.  */
	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		      - offset - INTVAL (XEXP (bitfield, 1)));

	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += (SUBREG_BYTE (XEXP (bitfield, 0))
			 / UNITS_PER_WORD) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		offset -= (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			   - MIN (UNITS_PER_WORD,
				  GET_MODE_SIZE (GET_MODE (memref))));
	    }

	  start_sequence ();
	  memref = adjust_address (memref, mode, offset);
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn_before (insns, insn);

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_BYTE (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_BYTE (dest) == 0
		     && (GET_MODE_CLASS (GET_MODE (dest))
			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
		     && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
			 <= UNITS_PER_WORD))
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insn_before (seq, insn);
	}
    }
}
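
/* A sketch of the transformation above, with invented operands: a
   byte-aligned field reference such as

	(set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
	     (reg:SI 60))

   can usually be rewritten as a plain QImode store to the containing
   byte, roughly (set (mem:QI ADDR') (lowpart of the source)), where
   ADDR' is ADDR adjusted by the endian-dependent byte offset computed
   above.  */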
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
  ((ACCUMULATE_OUTGOING_ARGS						      \
    ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL))   \
    : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
  ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
   + (STACK_POINTER_OFFSET))
#endif
#endif

/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
   had its address taken.  DECL is the decl or SAVE_EXPR for the
   object stored in the register, for later use if we do need to force
   REG into the stack.  REG is overwritten by the MEM like in
   put_reg_into_stack.  RESCAN is true if previously emitted
   instructions must be rescanned and modified now that the REG has
   been transformed.  */

rtx
gen_mem_addressof (rtx reg, tree decl, int rescan)
{
  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
			     REGNO (reg), decl);

  /* Calculate this before we start messing with decl's RTL.  */
  HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;

  /* If the original REG was a user-variable, then so is the REG whose
     address is being taken.  Likewise for unchanging.  */
  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
  RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);

  PUT_CODE (reg, MEM);
  MEM_VOLATILE_P (reg) = 0;
  MEM_ATTRS (reg) = 0;
  XEXP (reg, 0) = r;

  if (decl)
    {
      tree type = TREE_TYPE (decl);
      enum machine_mode decl_mode
	= (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
      rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
		      : DECL_RTL_IF_SET (decl));

      PUT_MODE (reg, decl_mode);

      /* Clear DECL_RTL momentarily so functions below will work
	 properly, then set it again.  */
      if (DECL_P (decl) && decl_rtl == reg)
	SET_DECL_RTL (decl, 0);

      set_mem_attributes (reg, decl, 1);
      set_mem_alias_set (reg, set);

      if (DECL_P (decl) && decl_rtl == reg)
	SET_DECL_RTL (decl, reg);

      if (rescan
	  && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
	fixup_var_refs (reg, GET_MODE (reg), TYPE_UNSIGNED (type), reg, 0);
    }
  else if (rescan)
    {
      /* This can only happen during reload.  Clear the same flag bits as
	 reload.  */
      RTX_UNCHANGING_P (reg) = 0;
      MEM_IN_STRUCT_P (reg) = 0;
      MEM_SCALAR_P (reg) = 0;

      fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
    }

  return reg;
}
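
/* The net effect, sketched with hypothetical register numbers: a
   pseudo (reg:SI 60) whose address is taken is rewritten in place as

	(mem:SI (addressof:SI (reg:SI 61) 60 <decl>))

   so that existing references remain valid, while purge_addressof can
   later either substitute the inner REG for the MEM or force the
   register into a stack slot.  */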
/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack.  */

void
flush_addressof (tree decl)
{
  if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
      && DECL_RTL (decl) != 0
      && GET_CODE (DECL_RTL (decl)) == MEM
      && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
      && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
    put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
}
/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.  */

static void
put_addressof_into_stack (rtx r, htab_t ht)
{
  tree decl, type;
  bool volatile_p, used_p;

  rtx reg = XEXP (r, 0);

  if (GET_CODE (reg) != REG)
    abort ();

  decl = ADDRESSOF_DECL (r);
  if (decl)
    {
      type = TREE_TYPE (decl);
      volatile_p = (TREE_CODE (decl) != SAVE_EXPR
		    && TREE_THIS_VOLATILE (decl));
      used_p = (TREE_USED (decl)
		|| (DECL_P (decl) && DECL_INITIAL (decl) != 0));
    }
  else
    {
      type = NULL_TREE;
      volatile_p = false;
      used_p = true;
    }

  put_reg_into_stack (0, reg, type, GET_MODE (reg), ADDRESSOF_REGNO (r),
		      volatile_p, used_p, false, ht);
}
/* List of replacements made below in purge_addressof_1 when creating
   bitfield insertions.  */
static rtx purge_bitfield_addressof_replacements;

/* List of replacements made below in purge_addressof_1 for patterns
   (MEM (ADDRESSOF (REG ...))).  The key of the list entry is the
   corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
   the whole pattern.  The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is
   not enough in complex cases, e.g. when some field values can be
   extracted by using a MEM with a narrower mode.  */
static rtx purge_addressof_replacements;
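
/* Both lists are chains of EXPR_LISTs of the form
   (expr_list KEY (expr_list VALUE <rest>)); i.e. key/value pairs
   flattened into a single list, which is how the traversals below walk
   them: the key sits at XEXP (tem, 0) and the value at
   XEXP (XEXP (tem, 1), 0).  */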
/* Helper function for purge_addressof.  See if the rtx expression at *LOC
   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
   the stack.  If the function returns FALSE then the replacement could not
   be made.  If MAY_POSTPONE is true and we would not put the addressof
   onto the stack, postpone processing of the insn.  */

static bool
purge_addressof_1 (rtx *loc, rtx insn, int force, int store, int may_postpone,
		   htab_t ht)
{
  rtx x;
  RTX_CODE code;
  int i, j;
  const char *fmt;
  bool result = true;
  bool libcall = false;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return true;

  /* Is this a libcall?  */
  if (!insn)
    libcall = REG_NOTE_KIND (*loc) == REG_RETVAL;

  code = GET_CODE (x);

  /* If we don't return in any of the cases below, we will recurse inside
     the RTX, which will normally result in any ADDRESSOF being forced into
     memory.  */
  if (code == SET)
    {
      result = purge_addressof_1 (&SET_DEST (x), insn, force, 1,
				  may_postpone, ht);
      result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0,
				   may_postpone, ht);
      return result;
    }
  else if (code == ADDRESSOF)
    {
      rtx sub, insns;

      if (GET_CODE (XEXP (x, 0)) != MEM)
	put_addressof_into_stack (x, ht);

      /* We must create a copy of the rtx because it was created by
	 overwriting a REG rtx which is always shared.  */
      sub = copy_rtx (XEXP (XEXP (x, 0), 0));
      if (validate_change (insn, loc, sub, 0)
	  || validate_replace_rtx (x, sub, insn))
	return true;

      start_sequence ();

      /* If SUB is a hard or virtual register, try it as a pseudo-register.
	 Otherwise, perhaps SUB is an expression, so generate code to compute
	 it.  */
      if (GET_CODE (sub) == REG && REGNO (sub) <= LAST_VIRTUAL_REGISTER)
	sub = copy_to_reg (sub);
      else
	sub = force_operand (sub, NULL_RTX);

      if (! validate_change (insn, loc, sub, 0)
	  && ! validate_replace_rtx (x, sub, insn))
	abort ();

      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);
      return true;
    }
  else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
    {
      rtx sub = XEXP (XEXP (x, 0), 0);

      if (GET_CODE (sub) == MEM)
	sub = adjust_address_nv (sub, GET_MODE (x), 0);
      else if (GET_CODE (sub) == REG
	       && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
	;
      else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
	{
	  int size_x, size_sub;

	  if (may_postpone)
	    {
	      /* Postpone for now, so that we do not emit bitfield arithmetics
		 unless there is some benefit from it.  */
	      if (!postponed_insns || XEXP (postponed_insns, 0) != insn)
		postponed_insns = alloc_INSN_LIST (insn, postponed_insns);
	      return true;
	    }
	  if (!insn)
	    {
	      rtx tem;

	      /* When processing REG_NOTES look at the list of
		 replacements done on the insn to find the register that X
		 was replaced by.  */
	      for (tem = purge_bitfield_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (x, XEXP (tem, 0)))
		  {
		    *loc = XEXP (XEXP (tem, 1), 0);
		    return true;
		  }

	      /* See comment for purge_addressof_replacements.  */
	      for (tem = purge_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
		  {
		    rtx z = XEXP (XEXP (tem, 1), 0);

		    if (GET_MODE (x) == GET_MODE (z)
			|| (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
			    && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
		      abort ();

		    /* It can happen that the note may speak of things
		       in a wider (or just different) mode than the
		       code did.  This is especially true of
		       REG_RETVAL.  */

		    if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
		      z = SUBREG_REG (z);

		    if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
			&& (GET_MODE_SIZE (GET_MODE (x))
			    > GET_MODE_SIZE (GET_MODE (z))))
		      {
			/* This can occur as a result of invalid
			   pointer casts, e.g. float f; ...
			   *(long long int *)&f.
			   ??? We could emit a warning here, but
			   without a line number that wouldn't be
			   very useful.  */
			z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
		      }
		    else
		      z = gen_lowpart (GET_MODE (x), z);

		    *loc = z;
		    return true;
		  }

	      /* When we are processing the REG_NOTES of the last instruction
		 of a libcall, there will be typically no replacements
		 for that insn; the replacements happened before, piecemeal
		 fashion.  OTOH we are not interested in the details of
		 this for the REG_EQUAL note, we want to know the big picture,
		 which can be succinctly described with a simple SUBREG.
		 Note that removing the REG_EQUAL note is not an option
		 on the last insn of a libcall, so we must do a replacement.  */

	      /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
		 we got
		 (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
		 [0 S8 A32]), which can be expressed with a simple
		 same-size subreg.  */
	      if ((GET_MODE_SIZE (GET_MODE (x))
		   <= GET_MODE_SIZE (GET_MODE (sub)))
		  /* Again, invalid pointer casts (as in
		     compile/990203-1.c) can require paradoxical
		     subregs.  */
		  || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
		      && (GET_MODE_SIZE (GET_MODE (x))
			  > GET_MODE_SIZE (GET_MODE (sub)))
		      && libcall))
		{
		  *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
		  return true;
		}
	      /* ??? Are there other cases we should handle?  */

	      /* Sometimes we may not be able to find the replacement.  For
		 example when the original insn was a MEM in a wider mode,
		 and the note is part of a sign extension of a narrowed
		 version of that MEM.  Gcc testcase compile/990829-1.c can
		 generate an example of this situation.  Rather than complain
		 we return false, which will prompt our caller to remove the
		 offending note.  */
	      return false;
	    }
	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));

	  /* Do not frob unchanging MEMs.  If a later reference forces the
	     pseudo to the stack, we can wind up with multiple writes to
	     an unchanging memory, which is invalid.  */
	  if (RTX_UNCHANGING_P (x) && size_x != size_sub)
	    ;

	  /* Don't even consider working with paradoxical subregs,
	     or the moral equivalent seen here.  */
	  else if (size_x <= size_sub
		   && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
	    {
	      /* Do a bitfield insertion to mirror what would happen
		 in memory.  */

	      rtx val, seq;

	      if (store)
		{
		  rtx p = PREV_INSN (insn);

		  start_sequence ();
		  val = gen_reg_rtx (GET_MODE (x));
		  if (! validate_change (insn, loc, val, 0))
		    {
		      /* Discard the current sequence and put the
			 ADDRESSOF on stack.  */
		      end_sequence ();
		      goto give_up;
		    }
		  seq = get_insns ();
		  end_sequence ();
		  emit_insn_before (seq, insn);
		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
					 insn, ht);

		  start_sequence ();
		  store_bit_field (sub, size_x, 0, GET_MODE (x),
				   val, GET_MODE_SIZE (GET_MODE (sub)));

		  /* Make sure to unshare any shared rtl that store_bit_field
		     might have created.  */
		  unshare_all_rtl_again (get_insns ());

		  seq = get_insns ();
		  end_sequence ();
		  p = emit_insn_after (seq, insn);
		  if (NEXT_INSN (insn))
		    compute_insns_for_mem (NEXT_INSN (insn),
					   p ? NEXT_INSN (p) : NULL_RTX,
					   ht);
		}
	      else
		{
		  rtx p = PREV_INSN (insn);

		  start_sequence ();
		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
					   GET_MODE (x), GET_MODE (x),
					   GET_MODE_SIZE (GET_MODE (sub)));

		  if (! validate_change (insn, loc, val, 0))
		    {
		      /* Discard the current sequence and put the
			 ADDRESSOF on stack.  */
		      end_sequence ();
		      goto give_up;
		    }

		  seq = get_insns ();
		  end_sequence ();
		  emit_insn_before (seq, insn);
		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
					 insn, ht);
		}

	      /* Remember the replacement so that the same one can be done
		 on the REG_NOTES.  */
	      purge_bitfield_addressof_replacements
		= gen_rtx_EXPR_LIST (VOIDmode, x,
				     gen_rtx_EXPR_LIST
				     (VOIDmode, val,
				      purge_bitfield_addressof_replacements));

	      /* We replaced with a reg -- all done.  */
	      return true;
	    }
	}
      else if (validate_change (insn, loc, sub, 0))
	{
	  /* Remember the replacement so that the same one can be done
	     on the REG_NOTES.  */
	  if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
	    {
	      rtx tem;

	      for (tem = purge_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
		  {
		    XEXP (XEXP (tem, 1), 0) = sub;
		    return true;
		  }
	      purge_addressof_replacements
		= gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
				     gen_rtx_EXPR_LIST (VOIDmode, sub,
							purge_addressof_replacements));
	      return true;
	    }
	  goto restart;
	}
    }

 give_up:
  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0,
				     may_postpone, ht);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0,
				       may_postpone, ht);
    }

  return result;
}
/* Return a hash value for K, a REG.  */

static hashval_t
insns_for_mem_hash (const void *k)
{
  /* Use the address of the key for the hash value.  */
  struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
  return htab_hash_pointer (m->key);
}

/* Return nonzero if K1 and K2 (two REGs) are the same.  */

static int
insns_for_mem_comp (const void *k1, const void *k2)
{
  struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
  struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
  return m1->key == m2->key;
}
struct insns_for_mem_walk_info
{
  /* The hash table that we are using to record which INSNs use which
     MEMs.  */
  htab_t ht;

  /* The INSN we are currently processing.  */
  rtx insn;

  /* Zero if we are walking to find ADDRESSOFs, one if we are walking
     to find the insns that use the REGs in the ADDRESSOFs.  */
  int pass;
};

/* Called from compute_insns_for_mem via for_each_rtx.  If R is a REG
   that might be used in an ADDRESSOF expression, record this INSN in
   the hash table given by DATA (which is really a pointer to an
   insns_for_mem_walk_info structure).  */

static int
insns_for_mem_walk (rtx *r, void *data)
{
  struct insns_for_mem_walk_info *ifmwi
    = (struct insns_for_mem_walk_info *) data;
  struct insns_for_mem_entry tmp;
  tmp.insns = NULL_RTX;

  if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
      && GET_CODE (XEXP (*r, 0)) == REG)
    {
      void **e;
      tmp.key = XEXP (*r, 0);
      e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
      if (*e == NULL)
	{
	  *e = ggc_alloc (sizeof (tmp));
	  memcpy (*e, &tmp, sizeof (tmp));
	}
    }
  else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
    {
      struct insns_for_mem_entry *ifme;
      tmp.key = *r;
      ifme = htab_find (ifmwi->ht, &tmp);

      /* If we have not already recorded this INSN, do so now.  Since
	 we process the INSNs in order, we know that if we have
	 recorded it, it must be at the front of the list.  */
      if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
	ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
					 ifme->insns);
    }

  return 0;
}
/* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
   which REGs in HT.  */

static void
compute_insns_for_mem (rtx insns, rtx last_insn, htab_t ht)
{
  rtx insn;
  struct insns_for_mem_walk_info ifmwi;
  ifmwi.ht = ht;

  for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
    for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
      if (INSN_P (insn))
	{
	  ifmwi.insn = insn;
	  for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
	}
}
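
/* To restate the two passes: pass 0 enters into HT every REG that
   appears inside an ADDRESSOF anywhere in the range, and pass 1 then
   records, for each such REG, an EXPR_LIST of the insns mentioning it.
   The fixup code can thus rescan only the insns that actually use a
   given register instead of the whole function.  */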
/* Helper function for purge_addressof called through for_each_rtx.
   Returns true iff the rtl is an ADDRESSOF.  */

static int
is_addressof (rtx *rtl, void *data ATTRIBUTE_UNUSED)
{
  return GET_CODE (*rtl) == ADDRESSOF;
}
/* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
   (MEM (ADDRESSOF)) patterns, and force any needed registers into the
   stack.  */

void
purge_addressof (rtx insns)
{
  rtx insn, tmp;
  htab_t ht;

  /* When we actually purge ADDRESSOFs, we turn REGs into MEMs.  That
     requires a fixup pass over the instruction stream to correct
     INSNs that depended on the REG being a REG, and not a MEM.  But,
     these fixup passes are slow.  Furthermore, most MEMs are not
     mentioned in very many instructions.  So, we speed up the process
     by pre-calculating which REGs occur in which INSNs; that allows
     us to perform the fixup passes much more quickly.  */
  ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
  compute_insns_for_mem (insns, NULL_RTX, ht);

  postponed_insns = NULL;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	if (! purge_addressof_1 (&PATTERN (insn), insn,
				 asm_noperands (PATTERN (insn)) > 0, 0, 1, ht))
	  /* If we could not replace the ADDRESSOFs in the insn,
	     something is wrong.  */
	  abort ();

	if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, 0, ht))
	  {
	    /* If we could not replace the ADDRESSOFs in the insn's notes,
	       we can just remove the offending notes instead.  */
	    rtx note;

	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	      {
		/* If we find a REG_RETVAL note then the insn is a libcall.
		   Such insns must have REG_EQUAL notes as well, in order
		   for later passes of the compiler to work.  So it is not
		   safe to delete the notes here, and instead we abort.  */
		if (REG_NOTE_KIND (note) == REG_RETVAL)
		  abort ();
		if (for_each_rtx (&note, is_addressof, NULL))
		  remove_note (insn, note);
	      }
	  }
      }

  /* Process the postponed insns.  */
  while (postponed_insns)
    {
      insn = XEXP (postponed_insns, 0);
      tmp = postponed_insns;
      postponed_insns = XEXP (postponed_insns, 1);
      free_INSN_LIST_node (tmp);

      if (! purge_addressof_1 (&PATTERN (insn), insn,
			       asm_noperands (PATTERN (insn)) > 0, 0, 0, ht))
	abort ();
    }

  /* Clean up.  */
  purge_bitfield_addressof_replacements = 0;
  purge_addressof_replacements = 0;

  /* REGs are shared.  purge_addressof will destructively replace a REG
     with a MEM, which creates shared MEMs.

     Unfortunately, the children of put_reg_into_stack assume that MEMs
     referring to the same stack slot are shared (fixup_var_refs and
     the associated hash table code).

     So, we have to do another unsharing pass after we have flushed any
     REGs that had their address taken into the stack.

     It may be worth tracking whether or not we converted any REGs into
     MEMs to avoid this overhead when it is not needed.  */
  unshare_all_rtl_again (get_insns ());
}
/* Convert a SET of a hard subreg to a set of the appropriate hard
   register.  A subroutine of purge_hard_subreg_sets.  */

static void
purge_single_hard_subreg_set (rtx pattern)
{
  rtx reg = SET_DEST (pattern);
  enum machine_mode mode = GET_MODE (SET_DEST (pattern));
  int offset = 0;

  if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
      && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
    {
      offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
				    GET_MODE (SUBREG_REG (reg)),
				    SUBREG_BYTE (reg),
				    GET_MODE (reg));
      reg = SUBREG_REG (reg);
    }

  if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      reg = gen_rtx_REG (mode, REGNO (reg) + offset);
      SET_DEST (pattern) = reg;
    }
}
/* Eliminate all occurrences of SETs of hard subregs from INSNS.  The
   only such SETs that we expect to see are those left in because
   integrate can't handle sets of parts of a return value register.

   We don't use alter_subreg because we only want to eliminate subregs
   of hard registers.  */

void
purge_hard_subreg_sets (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx pattern = PATTERN (insn);
	  switch (GET_CODE (pattern))
	    {
	    case SET:
	      if (GET_CODE (SET_DEST (pattern)) == SUBREG)
		purge_single_hard_subreg_set (pattern);
	      break;
	    case PARALLEL:
	      {
		int j;
		for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
		  {
		    rtx inner_pattern = XVECEXP (pattern, 0, j);
		    if (GET_CODE (inner_pattern) == SET
			&& GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
		      purge_single_hard_subreg_set (inner_pattern);
		  }
	      }
	      break;
	    default:
	      break;
	    }
	}
    }
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (tree fndecl, rtx insns)
{
  rtx insn;
  unsigned int i;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	if (INSN_DELETED_P (insn))
	  continue;
	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (GET_CODE (insn) == CALL_INSN)
	  instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
				      NULL_RTX, 0);

	/* Past this point all ASM statements should match.  Verify that
	   to avoid failures later in the compilation process.  */
	if (asm_noperands (PATTERN (insn)) >= 0
	    && ! check_asm_operands (PATTERN (insn)))
	  instantiate_virtual_regs_lossage (insn);
      }

  /* Instantiate the stack slots for the parm registers, for later use in
     addressof elimination.  */
  for (i = 0; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (tree fndecl, int valid_only)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
      HOST_WIDE_INT size_rtl;

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
	 larger than the declared type size.  We must use the larger of
	 the two sizes.  */
      size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
      size = MAX (size_rtl, size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let, int valid_only)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      instantiate_decl (DECL_RTL (t),
			int_size_in_bytes (TREE_TYPE (t)),
			valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is nonzero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || GET_CODE (x) != MEM)
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
      || (GET_CODE (addr) == REG
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only && size >= 0)
    {
      unsigned HOST_WIDE_INT decl_size = size;

      /* Now verify that the resulting address is valid for every integer or
	 floating-point mode up to and including SIZE bytes long.  We do this
	 since the object might be accessed in any mode and frame addresses
	 are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    new = arg_pointer_rtx, offset = cfa_offset;
  else
    return 0;

  *poffset = offset;
  return new;
}
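
/* So, for instance, a reference built around virtual_stack_vars_rtx
   such as (mem:SI (plus (reg virtual-stack-vars) (const_int 4)))
   instantiates to (mem:SI (plus (reg fp) (const_int 4 + var_offset))),
   where var_offset was set from STARTING_FRAME_OFFSET in
   instantiate_virtual_regs.  */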
/* Called when instantiate_virtual_regs has failed to update the instruction.
   Usually this means that a non-matching instruction has been emitted;
   however, for asm statements the problem may lie in the constraints.  */

static void
instantiate_virtual_regs_lossage (rtx insn)
{
  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      error_for_asm (insn, "impossible constraint in `asm'");
      delete_insn (insn);
    }
  else
    abort ();
}
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if
   replacement is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */

static int
instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;
  HOST_WIDE_INT offset = 0;
  rtx temp;
  rtx seq;
  int i, j;
  const char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  /* We may have detected and deleted invalid asm statements.  */
  if (object && INSN_P (object) && INSN_DELETED_P (object))
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
	{
	  rtx src = SET_SRC (x);

	  /* We are setting the register, not using it, so the relevant
	     offset is the negative of the offset to use were we using
	     the register.  */
	  offset = - offset;
	  instantiate_virtual_regs_1 (&src, NULL_RTX, 0);

	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
	    {
	      instantiate_virtual_regs_lossage (object);
	      return 1;
	    }

	  start_sequence ();
	  if (GET_CODE (src) != REG)
	    temp = force_operand (src, NULL_RTX);
	  else
	    temp = src;
	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, object);
	  SET_DEST (x) = new;

	  if (! validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    instantiate_virtual_regs_lossage (object);

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;
    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old, new_offset;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
		{
		  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					      extra_insns);
		  new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
		}
	      else
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	    }

#ifdef POINTERS_EXTEND_UNSIGNED
	  /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	     we can commute the PLUS and SUBREG because pointers into the
	     frame are well-behaved.  */
	  else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && 0 != (new
			    = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
						   &offset))
		   && validate_change (object, loc,
				       plus_constant (gen_lowpart (ptr_mode,
								   new),
						      offset
						      + INTVAL (XEXP (x, 1))),
				       0))
	    return 1;
#endif
	  else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  new_offset = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with just
	     the register.  */
	  if (new_offset == const0_rtx
	      && validate_change (object, loc, new, 0))
	    return 1;

	  /* Next try to replace the register and new offset.
	     There are two changes to validate here and we can't assume that
	     in the case of old offset equals new just changing the register
	     will yield a valid insn.  In the interests of a little efficiency,
	     however, we only call validate change once (we don't queue up the
	     changes and then call apply_change_group).  */

	  old = XEXP (x, 0);
	  if (offset == 0
	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
	      : (XEXP (x, 0) = new,
		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
	    {
	      if (! extra_insns)
		{
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 the constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      XEXP (x, 0) = new;
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new_offset), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  XEXP (x, 0) = old;
		  new = gen_rtx_PLUS (Pmode, new, new_offset);

		  start_sequence ();
		  temp = force_operand (new, NULL_RTX);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insn_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    {
		      instantiate_virtual_regs_lossage (object);
		      return 1;
		    }
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
4061 case DIV
: case UDIV
:
4062 case MOD
: case UMOD
:
4063 case AND
: case IOR
: case XOR
:
4064 case ROTATERT
: case ROTATE
:
4065 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
4067 case GE
: case GT
: case GEU
: case GTU
:
4068 case LE
: case LT
: case LEU
: case LTU
:
4069 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
4070 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
4075 /* Most cases of MEM that convert to valid addresses have already been
4076 handled by our scan of decls. The only special handling we
4077 need here is to make a copy of the rtx to ensure it isn't being
4078 shared if we have to change it to a pseudo.
4080 If the rtx is a simple reference to an address via a virtual register,
4081 it can potentially be shared. In such cases, first try to make it
4082 a valid address, which can also be shared. Otherwise, copy it and
4085 First check for common cases that need no processing. These are
4086 usually due to instantiation already being done on a previous instance
4090 if (CONSTANT_ADDRESS_P (temp
)
4091 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4092 || temp
== arg_pointer_rtx
4094 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4095 || temp
== hard_frame_pointer_rtx
4097 || temp
== frame_pointer_rtx
)
4100 if (GET_CODE (temp
) == PLUS
4101 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
4102 && (XEXP (temp
, 0) == frame_pointer_rtx
4103 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4104 || XEXP (temp
, 0) == hard_frame_pointer_rtx
4106 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4107 || XEXP (temp
, 0) == arg_pointer_rtx
4112 if (temp
== virtual_stack_vars_rtx
4113 || temp
== virtual_incoming_args_rtx
4114 || (GET_CODE (temp
) == PLUS
4115 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
4116 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
4117 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
4119 /* This MEM may be shared. If the substitution can be done without
4120 the need to generate new pseudos, we want to do it in place
4121 so all copies of the shared rtx benefit. The call below will
4122 only make substitutions if the resulting address is still
4125 Note that we cannot pass X as the object in the recursive call
4126 since the insn being processed may not allow all valid
4127 addresses. However, if we were not passed on object, we can
4128 only modify X without copying it if X will have a valid
4131 ??? Also note that this can still lose if OBJECT is an insn that
4132 has less restrictions on an address that some other insn.
4133 In that case, we will modify the shared address. This case
4134 doesn't seem very likely, though. One case where this could
4135 happen is in the case of a USE or CLOBBER reference, but we
4136 take care of that below. */
4138 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
4139 object
? object
: x
, 0))
4142 /* Otherwise make a copy and process that copy. We copy the entire
4143 RTL expression since it might be a PLUS which could also be
4145 *loc
= x
= copy_rtx (x
);
4148 /* Fall through to generic unary operation case. */
4151 case STRICT_LOW_PART
:
4153 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
4154 case SIGN_EXTEND
: case ZERO_EXTEND
:
4155 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
4156 case FLOAT
: case FIX
:
4157 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
4162 case POPCOUNT
: case PARITY
:
4163 /* These case either have just one operand or we know that we need not
4164 check the rest of the operands. */
4170 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4171 go ahead and make the invalid one, but do it to a copy. For a REG,
4172 just make the recursive call, since there's no chance of a problem. */
4174 if ((GET_CODE (XEXP (x
, 0)) == MEM
4175 && instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), XEXP (x
, 0),
4177 || (GET_CODE (XEXP (x
, 0)) == REG
4178 && instantiate_virtual_regs_1 (&XEXP (x
, 0), object
, 0)))
4181 XEXP (x
, 0) = copy_rtx (XEXP (x
, 0));
4186 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4187 in front of this insn and substitute the temporary. */
4188 if ((new = instantiate_new_reg (x
, &offset
)) != 0)
4190 temp
= plus_constant (new, offset
);
4191 if (!validate_change (object
, loc
, temp
, 0))
4197 temp
= force_operand (temp
, NULL_RTX
);
4201 emit_insn_before (seq
, object
);
4202 if (! validate_change (object
, loc
, temp
, 0)
4203 && ! validate_replace_rtx (x
, temp
, object
))
4204 instantiate_virtual_regs_lossage (object
);
4211 if (GET_CODE (XEXP (x
, 0)) == REG
)
4214 else if (GET_CODE (XEXP (x
, 0)) == MEM
)
4216 /* If we have a (addressof (mem ..)), do any instantiation inside
4217 since we know we'll be making the inside valid when we finally
4218 remove the ADDRESSOF. */
4219 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), NULL_RTX
, 0);
4228 /* Scan all subexpressions. */
4229 fmt
= GET_RTX_FORMAT (code
);
4230 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
4233 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
4236 else if (*fmt
== 'E')
4237 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4238 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
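
/* The following standalone sketch (not GCC code; guarded out with #if 0)
   illustrates the offset arithmetic instantiate_virtual_regs_1 performs:
   a use of a virtual register with known translation HARD + OFFSET inside
   (plus VIRT (const_int C)) becomes (plus HARD (const_int OFFSET + C)),
   and a zero total offset collapses to HARD alone.  The struct and
   function names below are hypothetical, and the fp/16 translation is an
   assumption for the example only.  */
#if 0
#include <stdio.h>

struct vreg_sketch { const char *hard_reg; long offset; };

/* Fold virtual-reg-plus-constant into hard-reg-plus-constant, the way
   the PLUS case above folds new_offset = plus_constant (...).  */
static void
instantiate_sketch (struct vreg_sketch v, long c)
{
  long total = v.offset + c;
  if (total == 0)
    printf ("(reg %s)\n", v.hard_reg);
  else
    printf ("(plus (reg %s) (const_int %ld))\n", v.hard_reg, total);
}

int
main (void)
{
  struct vreg_sketch stack_vars = { "fp", 16 };  /* assumed translation */
  instantiate_sketch (stack_vars, 8);    /* (plus (reg fp) (const_int 24)) */
  instantiate_sketch (stack_vars, -16);  /* (reg fp) */
  return 0;
}
#endif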
/* Return the first insn following those generated by `assign_parms'.  */

rtx
get_first_nonparm_insn (void)
{
  if (last_parm_insn)
    return NEXT_INSN (last_parm_insn);
  return get_insns ();
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (tree exp, tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	fntype = get_callee_fndecl (fntype);
	fntype = fntype ? TREE_TYPE (fntype) : 0;
	break;
      case FUNCTION_DECL:
	fntype = TREE_TYPE (fntype);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = 0;
	break;
      default:
	/* We don't expect other rtl types here.  */
	abort ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;
  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (GET_CODE (reg) != REG)
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
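
/* An illustrative sketch (not GCC code; guarded out with #if 0) of the
   decision ladder in aggregate_value_p above, with the target and
   register queries stubbed out.  All names here are hypothetical; only
   the ordering of the tests mirrors the real function.  */
#if 0
struct type_sketch
{
  int is_void, target_wants_memory, addressable, pcc_aggregate;
  int value_regno, value_nregs;
};

/* Hypothetical stand-in for the call_used_regs[] query.  */
static int
call_used_sketch (int regno)
{
  return regno < 8;
}

static int
returns_in_memory_sketch (const struct type_sketch *t)
{
  int i;

  if (t->is_void)
    return 0;                   /* void: nothing to return */
  if (t->target_wants_memory)   /* targetm.calls.return_in_memory */
    return 1;
  if (t->addressable)           /* TREE_ADDRESSABLE: built in memory */
    return 1;
  if (t->pcc_aggregate)         /* flag_pcc_struct_return && aggregate */
    return 1;
  /* Finally, every register the value occupies must be call-clobbered,
     or the value has to come back in memory.  */
  for (i = 0; i < t->value_nregs; i++)
    if (! call_used_sketch (t->value_regno + i))
      return 1;
  return 0;
}
#endif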
/* Assign RTL expressions to the function's parameters.
   This may involve copying them into registers and using
   those registers as the RTL for them.  */

void
assign_parms (tree fndecl)
{
  tree parm;
  CUMULATIVE_ARGS args_so_far;
  /* Total space needed so far for args on the stack,
     given as a constant and a tree-expression.  */
  struct args_size stack_args_size;
  HOST_WIDE_INT extra_pretend_bytes = 0;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
  /* This is used for the arg pointer when referring to stack args.  */
  rtx internal_arg_pointer;
  /* This is a dummy PARM_DECL that we used for the function result if
     the function returns a structure.  */
  tree function_result_decl = 0;
  int varargs_setup = 0;
  int reg_parm_stack_space ATTRIBUTE_UNUSED = 0;
  rtx conversion_insns = 0;

  /* Nonzero if function takes extra anonymous args.
     This means the last named arg must be on the stack
     right before the anonymous ones.  */
  int stdarg = current_function_stdarg;

  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  stack_args_size.constant = 0;
  stack_args_size.var = 0;

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! current_function_returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));

      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);

      DECL_ARG_TYPE (function_result_decl) = type;
      TREE_CHAIN (function_result_decl) = fnargs;
      fnargs = function_result_decl;
    }

  orig_fnargs = fnargs;

  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  parm_reg_stack_loc = ggc_alloc_cleared (max_parm_reg * sizeof (rtx));

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    fnargs = split_complex_args (fnargs);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl, -1);
#endif

  /* We haven't yet found an argument that we must push and pretend the
     caller did.  */
  current_function_pretend_args_size = 0;

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      rtx entry_parm;
      rtx stack_parm;
      enum machine_mode promoted_mode, passed_mode;
      enum machine_mode nominal_mode, promoted_nominal_mode;
      int unsignedp;
      struct locate_and_pad_arg_data locate;
      int passed_pointer = 0;
      int did_conversion = 0;
      tree passed_type = DECL_ARG_TYPE (parm);
      tree nominal_type = TREE_TYPE (parm);
      int last_named = 0, named_arg;
      int in_regs;
      int partial = 0;
      int pretend_bytes = 0;
      int loaded_in_reg = 0;

      /* Set LAST_NAMED if this is last named arg before last
	 anonymous args.  */
      if (stdarg)
	{
	  tree tem;

	  for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
	    if (DECL_NAME (tem))
	      break;

	  if (tem == 0)
	    last_named = 1;
	}
      /* Set NAMED_ARG if this arg should be treated as a named arg.  For
	 most machines, if this is a varargs/stdarg function, then we treat
	 the last named arg as if it were anonymous too.  */
      named_arg = (targetm.calls.strict_argument_naming (&args_so_far)
		   ? 1 : !last_named);

      if (TREE_TYPE (parm) == error_mark_node
	  /* This can happen after weird syntax errors
	     or if an enum type is defined among the parms.  */
	  || TREE_CODE (parm) != PARM_DECL
	  || passed_type == NULL)
	{
	  SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  TREE_USED (parm) = 1;
	  continue;
	}

      /* Find mode of arg as it is passed, and mode of arg
	 as it should be during execution of this function.  */
      passed_mode = TYPE_MODE (passed_type);
      nominal_mode = TYPE_MODE (nominal_type);

      /* If the parm's mode is VOID, its value doesn't matter,
	 so avoid the usual things like emit_move_insn that could crash.  */
      if (nominal_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* If the parm is to be passed as a transparent union, use the
	 type of the first field for the tests below.  We have already
	 verified that the modes are the same.  */
      if (DECL_TRANSPARENT_UNION (parm)
	  || (TREE_CODE (passed_type) == UNION_TYPE
	      && TYPE_TRANSPARENT_UNION (passed_type)))
	passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

      /* See if this arg was passed by invisible reference.  It is if
	 it is an object whose size depends on the contents of the
	 object itself or if the machine requires these objects be passed
	 that way.  */

      if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
	  || TREE_ADDRESSABLE (passed_type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
					     passed_type, named_arg)
#endif
	  )
	{
	  passed_type = nominal_type = build_pointer_type (passed_type);
	  passed_pointer = 1;
	  passed_mode = nominal_mode = Pmode;
	}
      /* See if the frontend wants to pass this by invisible reference.  */
      else if (passed_type != nominal_type
	       && POINTER_TYPE_P (passed_type)
	       && TREE_TYPE (passed_type) == nominal_type)
	{
	  nominal_type = passed_type;
	  passed_pointer = 1;
	  passed_mode = nominal_mode = Pmode;
	}

      promoted_mode = passed_mode;

      if (targetm.calls.promote_function_args (TREE_TYPE (fndecl)))
	{
	  /* Compute the mode in which the arg is actually extended to.  */
	  unsignedp = TYPE_UNSIGNED (passed_type);
	  promoted_mode = promote_mode (passed_type, promoted_mode,
					&unsignedp, 1);
	}

      /* Let machine desc say which reg (if any) the parm arrives in.
	 0 means it arrives on the stack.  */
#ifdef FUNCTION_INCOMING_ARG
      entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
					  passed_type, named_arg);
#else
      entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
				 passed_type, named_arg);
#endif

      if (entry_parm == 0)
	promoted_mode = passed_mode;

      /* If this is the last named parameter, do any required setup for
	 varargs or stdargs.  We need to know about the case of this being an
	 addressable type, in which case we skip the registers it
	 would have arrived in.

	 For stdargs, LAST_NAMED will be set for two parameters, the one that
	 is actually the last named, and the dummy parameter.  We only
	 want to do this action once.

	 Also, indicate when RTL generation is to be suppressed.  */
      if (last_named && !varargs_setup)
	{
	  int varargs_pretend_bytes = 0;
	  targetm.calls.setup_incoming_varargs (&args_so_far, promoted_mode,
						passed_type,
						&varargs_pretend_bytes, 0);
	  varargs_setup = 1;

	  /* If the back-end has requested extra stack space, record how
	     much is needed.  Do not change pretend_args_size otherwise
	     since it may be nonzero from an earlier partial argument.  */
	  if (varargs_pretend_bytes > 0)
	    current_function_pretend_args_size = varargs_pretend_bytes;
	}

      /* Determine parm's home in the stack,
	 in case it arrives in the stack or we should pretend it did.

	 Compute the stack position and rtx where the argument arrives
	 and its size.

	 There is one complexity here:  If this was a parameter that would
	 have been passed in registers, but wasn't only because it is
	 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
	 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
	 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
	 0 as it was the previous time.  */
      in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
      in_regs = 1;
#endif
      if (!in_regs && !named_arg)
	{
	  int pretend_named =
	    targetm.calls.pretend_outgoing_varargs_named (&args_so_far);
	  if (pretend_named)
	    {
#ifdef FUNCTION_INCOMING_ARG
	      in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
					       passed_type,
					       pretend_named) != 0;
#else
	      in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
				      passed_type,
				      pretend_named) != 0;
#endif
	    }
	}

      /* If this parameter was passed both in registers and in the stack,
	 use the copy on the stack.  */
      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
	entry_parm = 0;

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (entry_parm)
	{
	  partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
						passed_type, named_arg);
	  if (partial
	      /* The caller might already have allocated stack space
		 for the register parameters.  */
	      && reg_parm_stack_space == 0)
	    {
	      /* Part of this argument is passed in registers and part
		 is passed on the stack.  Ask the prologue code to extend
		 the stack part so that we can recreate the full value.

		 PRETEND_BYTES is the size of the registers we need to store.
		 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
		 stack space that the prologue should allocate.

		 Internally, gcc assumes that the argument pointer is
		 aligned to STACK_BOUNDARY bits.  This is used both for
		 alignment optimizations (see init_emit) and to locate
		 arguments that are aligned to more than PARM_BOUNDARY
		 bits.  We must preserve this invariant by rounding
		 CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to a stack
		 boundary.  */

	      /* We assume at most one partial arg, and it must be the first
		 argument on the stack.  */
	      if (extra_pretend_bytes || current_function_pretend_args_size)
		abort ();

	      pretend_bytes = partial * UNITS_PER_WORD;
	      current_function_pretend_args_size
		= CEIL_ROUND (pretend_bytes, STACK_BYTES);

	      /* We want to align relative to the actual stack pointer, so
		 don't include this in the stack size until later.  */
	      extra_pretend_bytes = current_function_pretend_args_size;
	    }
	}
#endif

      memset (&locate, 0, sizeof (locate));
      locate_and_pad_parm (promoted_mode, passed_type, in_regs,
			   entry_parm ? partial : 0, fndecl,
			   &stack_args_size, &locate);
      /* Adjust offsets to include the pretend args.  */
      locate.slot_offset.constant += extra_pretend_bytes - pretend_bytes;
      locate.offset.constant += extra_pretend_bytes - pretend_bytes;

      {
	rtx offset_rtx;

	/* If we're passing this arg using a reg, make its stack home
	   the aligned stack slot.  */
	if (entry_parm)
	  offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
	else
	  offset_rtx = ARGS_SIZE_RTX (locate.offset);

	if (offset_rtx == const0_rtx)
	  stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
	else
	  stack_parm = gen_rtx_MEM (promoted_mode,
				    gen_rtx_PLUS (Pmode,
						  internal_arg_pointer,
						  offset_rtx));

	set_mem_attributes (stack_parm, parm, 1);
	if (entry_parm && MEM_ATTRS (stack_parm)->align < PARM_BOUNDARY)
	  set_mem_align (stack_parm, PARM_BOUNDARY);

	/* Set also REG_ATTRS if parameter was passed in a register.  */
	if (entry_parm)
	  set_reg_attrs_for_parm (entry_parm, stack_parm);
      }

      /* If this parm was passed part in regs and part in memory,
	 pretend it arrived entirely in memory
	 by pushing the register-part onto the stack.

	 In the special case of a DImode or DFmode that is split,
	 we could put it together in a pseudoreg directly,
	 but for now that's not worth bothering with.  */

      if (partial)
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (entry_parm) == PARALLEL)
	    emit_group_store (validize_mem (stack_parm), entry_parm,
			      TREE_TYPE (parm),
			      int_size_in_bytes (TREE_TYPE (parm)));
	  else
	    move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
				 partial);

	  entry_parm = stack_parm;
	}

      /* If we didn't decide this parm came in a register,
	 by default it came on the stack.  */
      if (entry_parm == 0)
	entry_parm = stack_parm;

      /* Record permanently how this parm was passed.  */
      set_decl_incoming_rtl (parm, entry_parm);

      /* If there is actually space on the stack for this parm,
	 count it in stack_args_size; otherwise set stack_parm to 0
	 to indicate there is no preallocated stack slot for the parm.  */

      if (entry_parm == stack_parm
	  || (GET_CODE (entry_parm) == PARALLEL
	      && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
#if defined (REG_PARM_STACK_SPACE)
	  /* On some machines, even if a parm value arrives in a register
	     there is still an (uninitialized) stack slot allocated
	     for it.  */
	  || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
	  )
	{
	  stack_args_size.constant += locate.size.constant;
	  if (locate.size.var)
	    ADD_PARM_SIZE (stack_args_size, locate.size.var);
	}
      else
	/* No stack slot was pushed for this parm.  */
	stack_parm = 0;

      /* Update info on where next arg arrives in registers.  */

      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
			    passed_type, named_arg);

      /* If we can't trust the parm stack slot to be aligned enough
	 for its ultimate type, don't use that slot after entry.
	 We'll make another stack slot, if we need one.  */
      {
	unsigned int thisparm_boundary
	  = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);

	if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
	  stack_parm = 0;
      }

      /* If parm was passed in memory, and we need to convert it on entry,
	 don't store it back in that same slot.  */
      if (entry_parm == stack_parm
	  && nominal_mode != BLKmode && nominal_mode != passed_mode)
	stack_parm = 0;

      /* When an argument is passed in multiple locations, we can't
	 make use of this information, but we can save some copying if
	 the whole argument is passed in a single register.  */
      if (GET_CODE (entry_parm) == PARALLEL
	  && nominal_mode != BLKmode && passed_mode != BLKmode)
	{
	  int i, len = XVECLEN (entry_parm, 0);

	  for (i = 0; i < len; i++)
	    if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
		&& GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
		&& (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		    == passed_mode)
		&& INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	      {
		entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
		set_decl_incoming_rtl (parm, entry_parm);
		break;
	      }
	}

      /* ENTRY_PARM is an RTX for the parameter as it arrives,
	 in the mode in which it arrives.
	 STACK_PARM is an RTX for a stack slot where the parameter can live
	 during the function (in case we want to put it there).
	 STACK_PARM is 0 if no stack slot was pushed for it.

	 Now output code if necessary to convert ENTRY_PARM to
	 the type in which this function declares it,
	 and store that result in an appropriate place,
	 which may be a pseudo reg, may be STACK_PARM,
	 or may be a local stack slot if STACK_PARM is 0.

	 Set DECL_RTL to that place.  */

      if (GET_CODE (entry_parm) == PARALLEL && nominal_mode != BLKmode
	  && XVECLEN (entry_parm, 0) > 1)
	{
	  /* Reconstitute objects the size of a register or larger using
	     register operations instead of the stack.  */
	  rtx parmreg = gen_reg_rtx (nominal_mode);

	  if (REG_P (parmreg))
	    {
	      unsigned int regno = REGNO (parmreg);

	      emit_group_store (parmreg, entry_parm, TREE_TYPE (parm),
				int_size_in_bytes (TREE_TYPE (parm)));
	      SET_DECL_RTL (parm, parmreg);
	      loaded_in_reg = 1;

	      if (regno >= max_parm_reg)
		{
		  rtx *new;
		  int old_max_parm_reg = max_parm_reg;

		  /* It's slow to expand this one register at a time,
		     but it's also rare and we need max_parm_reg to be
		     precisely correct.  */
		  max_parm_reg = regno + 1;
		  new = ggc_realloc (parm_reg_stack_loc,
				     max_parm_reg * sizeof (rtx));
		  memset (new + old_max_parm_reg, 0,
			  (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
		  parm_reg_stack_loc = new;
		  parm_reg_stack_loc[regno] = stack_parm;
		}
	    }
	}

      if (nominal_mode == BLKmode
#ifdef BLOCK_REG_PADDING
	  || (locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
	      && GET_MODE_SIZE (promoted_mode) < UNITS_PER_WORD)
#endif
	  || GET_CODE (entry_parm) == PARALLEL)
	{
	  /* If a BLKmode arrives in registers, copy it to a stack slot.
	     Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (entry_parm) == REG
	      || (GET_CODE (entry_parm) == PARALLEL
		  && (!loaded_in_reg || !optimize)))
	    {
	      int size = int_size_in_bytes (TREE_TYPE (parm));
	      int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
	      rtx mem;

	      /* Note that we will be storing an integral number of words.
		 So we have to be careful to ensure that we allocate an
		 integral number of words.  We do this below in the
		 assign_stack_local if space was not allocated in the argument
		 list.  If it was, this will not work if PARM_BOUNDARY is not
		 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
		 if it becomes a problem.  Exception is when BLKmode arrives
		 with arguments not conforming to word_mode.  */

	      if (stack_parm == 0)
		{
		  stack_parm = assign_stack_local (BLKmode, size_stored, 0);
		  PUT_MODE (stack_parm, GET_MODE (entry_parm));
		  set_mem_attributes (stack_parm, parm, 1);
		}
	      else if (GET_CODE (entry_parm) == PARALLEL
		       && GET_MODE (entry_parm) == BLKmode)
		;
	      else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
		abort ();

	      mem = validize_mem (stack_parm);

	      /* Handle calls that pass values in multiple non-contiguous
		 locations.  The Irix 6 ABI has examples of this.  */
	      if (GET_CODE (entry_parm) == PARALLEL)
		emit_group_store (mem, entry_parm, TREE_TYPE (parm), size);

	      else if (size == 0)
		;

	      /* If SIZE is that of a mode no bigger than a word, just use
		 that mode's store operation.  */
	      else if (size <= UNITS_PER_WORD)
		{
		  enum machine_mode mode
		    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

		  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
		      && (size == UNITS_PER_WORD
			  || (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
			      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
		      )
		    {
		      rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
		      emit_move_insn (change_address (mem, mode, 0), reg);
		    }

		  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
		     machine must be aligned to the left before storing
		     to memory.  Note that the previous test doesn't
		     handle all cases (e.g. SIZE == 3).  */
		  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
			   && (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
			       == downward)
#else
			   && BYTES_BIG_ENDIAN
#endif
			   )
		    {
		      rtx tem, x;
		      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
		      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

		      x = expand_binop (word_mode, ashl_optab, reg,
					GEN_INT (by), 0, 1, OPTAB_WIDEN);
		      tem = change_address (mem, word_mode, 0);
		      emit_move_insn (tem, x);
		    }
		  else
		    move_block_from_reg (REGNO (entry_parm), mem,
					 size_stored / UNITS_PER_WORD);
		}
	      else
		move_block_from_reg (REGNO (entry_parm), mem,
				     size_stored / UNITS_PER_WORD);
	    }
	  /* If parm is already bound to register pair, don't change
	     this binding.  */
	  if (! DECL_RTL_SET_P (parm))
	    SET_DECL_RTL (parm, stack_parm);
	}
      else if (! ((! optimize
		   && ! DECL_REGISTER (parm))
		  || TREE_SIDE_EFFECTS (parm)
		  /* If -ffloat-store specified, don't put explicit
		     float variables into registers.  */
		  || (flag_float_store
		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
	       /* Always assign pseudo to structure return or item passed
		  by invisible reference.  */
	       || passed_pointer || parm == function_result_decl)
	{
	  /* Store the parm in a pseudoregister during the function, but we
	     may need to do it in a wider mode.  */

	  rtx parmreg;
	  unsigned int regno, regnoi = 0, regnor = 0;

	  unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));

	  promoted_nominal_mode
	    = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);

	  parmreg = gen_reg_rtx (promoted_nominal_mode);
	  mark_user_reg (parmreg);

	  /* If this was an item that we received a pointer to, set DECL_RTL
	     appropriately.  */
	  if (passed_pointer)
	    {
	      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
				   parmreg);
	      set_mem_attributes (x, parm, 1);
	      SET_DECL_RTL (parm, x);
	    }
	  else
	    {
	      SET_DECL_RTL (parm, parmreg);
	      maybe_set_unchanging (DECL_RTL (parm), parm);
	    }

	  /* Copy the value into the register.  */
	  if (nominal_mode != passed_mode
	      || promoted_nominal_mode != promoted_mode)
	    {
	      int save_tree_used;
	      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
		 mode, by the caller.  We now have to convert it to
		 NOMINAL_MODE, if different.  However, PARMREG may be in
		 a different mode than NOMINAL_MODE if it is being stored
		 promoted.

		 If ENTRY_PARM is a hard register, it might be in a register
		 not valid for operating in its mode (e.g., an odd-numbered
		 register for a DFmode).  In that case, moves are the only
		 thing valid, so we can't do a convert from there.  This
		 occurs when the calling sequence allow such misaligned
		 usages.

		 In addition, the conversion may involve a call, which could
		 clobber parameters which haven't been copied to pseudo
		 registers yet.  Therefore, we must first copy the parm to
		 a pseudo reg here, and save the conversion until after all
		 parameters have been moved.  */

	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

	      emit_move_insn (tempreg, validize_mem (entry_parm));

	      push_to_sequence (conversion_insns);
	      tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);

	      if (GET_CODE (tempreg) == SUBREG
		  && GET_MODE (tempreg) == nominal_mode
		  && GET_CODE (SUBREG_REG (tempreg)) == REG
		  && nominal_mode == passed_mode
		  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
		  && GET_MODE_SIZE (GET_MODE (tempreg))
		     < GET_MODE_SIZE (GET_MODE (entry_parm)))
		{
		  /* The argument is already sign/zero extended, so note it
		     into the subreg.  */
		  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
		  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
		}

	      /* TREE_USED gets set erroneously during expand_assignment.  */
	      save_tree_used = TREE_USED (parm);
	      expand_assignment (parm,
				 make_tree (nominal_type, tempreg), 0);
	      TREE_USED (parm) = save_tree_used;
	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }
	  else
	    emit_move_insn (parmreg, validize_mem (entry_parm));

	  /* If we were passed a pointer but the actual value
	     can safely live in a register, put it in one.  */
	  if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
	      /* If by-reference argument was promoted, demote it.  */
	      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
		  || ! ((! optimize
			 && ! DECL_REGISTER (parm))
			|| TREE_SIDE_EFFECTS (parm)
			/* If -ffloat-store specified, don't put explicit
			   float variables into registers.  */
			|| (flag_float_store
			    && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
	    {
	      /* We can't use nominal_mode, because it will have been set to
		 Pmode above.  We must use the actual mode of the parm.  */
	      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	      mark_user_reg (parmreg);
	      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
		{
		  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
		  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
		  push_to_sequence (conversion_insns);
		  emit_move_insn (tempreg, DECL_RTL (parm));
		  SET_DECL_RTL (parm,
				convert_to_mode (GET_MODE (parmreg),
						 tempreg, unsigned_p));
		  emit_move_insn (parmreg, DECL_RTL (parm));
		  conversion_insns = get_insns ();
		  did_conversion = 1;
		  end_sequence ();
		}
	      else
		emit_move_insn (parmreg, DECL_RTL (parm));
	      SET_DECL_RTL (parm, parmreg);
	      /* STACK_PARM is the pointer, not the parm, and PARMREG is
		 now the parm.  */
	      stack_parm = 0;
	    }
#ifdef FUNCTION_ARG_CALLEE_COPIES
	  /* If we are passed an arg by reference and it is our responsibility
	     to make a copy, do it now.
	     PASSED_TYPE and PASSED mode now refer to the pointer, not the
	     original argument, so we must recreate them in the call to
	     FUNCTION_ARG_CALLEE_COPIES.  */
	  /* ??? Later add code to handle the case that if the argument isn't
	     modified, don't do the copy.  */

	  else if (passed_pointer
		   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
						  TYPE_MODE (TREE_TYPE (passed_type)),
						  TREE_TYPE (passed_type),
						  named_arg)
		   && ! TREE_ADDRESSABLE (TREE_TYPE (passed_type)))
	    {
	      rtx copy;
	      tree type = TREE_TYPE (passed_type);

	      /* This sequence may involve a library call perhaps clobbering
		 registers that haven't been copied to pseudos yet.  */

	      push_to_sequence (conversion_insns);

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
		/* This is a variable sized object.  */
		copy = gen_rtx_MEM (BLKmode,
				    allocate_dynamic_stack_space
				    (expr_size (parm), NULL_RTX,
				     TYPE_ALIGN (type)));
	      else
		copy = assign_stack_temp (TYPE_MODE (type),
					  int_size_in_bytes (type), 1);
	      set_mem_attributes (copy, parm, 1);

	      store_expr (parm, copy, 0);
	      emit_move_insn (parmreg, XEXP (copy, 0));
	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }
#endif /* FUNCTION_ARG_CALLEE_COPIES */

	  /* In any case, record the parm's desired stack location
	     in case we later discover it must live in the stack.

	     If it is a COMPLEX value, store the stack location for both
	     halves.  */

	  if (GET_CODE (parmreg) == CONCAT)
	    regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
	  else
	    regno = REGNO (parmreg);

	  if (regno >= max_parm_reg)
	    {
	      rtx *new;
	      int old_max_parm_reg = max_parm_reg;

	      /* It's slow to expand this one register at a time,
		 but it's also rare and we need max_parm_reg to be
		 precisely correct.  */
	      max_parm_reg = regno + 1;
	      new = ggc_realloc (parm_reg_stack_loc,
				 max_parm_reg * sizeof (rtx));
	      memset (new + old_max_parm_reg, 0,
		      (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
	      parm_reg_stack_loc = new;
	    }

	  if (GET_CODE (parmreg) == CONCAT)
	    {
	      enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));

	      regnor = REGNO (gen_realpart (submode, parmreg));
	      regnoi = REGNO (gen_imagpart (submode, parmreg));

	      if (stack_parm != 0)
		{
		  parm_reg_stack_loc[regnor]
		    = gen_realpart (submode, stack_parm);
		  parm_reg_stack_loc[regnoi]
		    = gen_imagpart (submode, stack_parm);
		}
	      else
		{
		  parm_reg_stack_loc[regnor] = 0;
		  parm_reg_stack_loc[regnoi] = 0;
		}
	    }
	  else
	    parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;

	  /* Mark the register as eliminable if we did no conversion
	     and it was copied from memory at a fixed offset,
	     and the arg pointer was not copied to a pseudo-reg.
	     If the arg pointer is a pseudo reg or the offset formed
	     an invalid address, such memory-equivalences
	     as we make here would screw up life analysis for it.  */
	  if (nominal_mode == passed_mode
	      && ! did_conversion
	      && stack_parm != 0
	      && GET_CODE (stack_parm) == MEM
	      && locate.offset.var == 0
	      && reg_mentioned_p (virtual_incoming_args_rtx,
				  XEXP (stack_parm, 0)))
	    {
	      rtx linsn = get_last_insn ();
	      rtx sinsn, set;

	      /* Mark complex types separately.  */
	      if (GET_CODE (parmreg) == CONCAT)
		/* Scan backwards for the set of the real and
		   imaginary parts.  */
		for (sinsn = linsn; sinsn != 0;
		     sinsn = prev_nonnote_insn (sinsn))
		  {
		    set = single_set (sinsn);
		    if (set != 0
			&& SET_DEST (set) == regno_reg_rtx[regnoi])
		      REG_NOTES (sinsn)
			= gen_rtx_EXPR_LIST (REG_EQUIV,
					     parm_reg_stack_loc[regnoi],
					     REG_NOTES (sinsn));
		    else if (set != 0
			     && SET_DEST (set) == regno_reg_rtx[regnor])
		      REG_NOTES (sinsn)
			= gen_rtx_EXPR_LIST (REG_EQUIV,
					     parm_reg_stack_loc[regnor],
					     REG_NOTES (sinsn));
		  }
	      else if ((set = single_set (linsn)) != 0
		       && SET_DEST (set) == parmreg)
		REG_NOTES (linsn)
		  = gen_rtx_EXPR_LIST (REG_EQUIV,
				       stack_parm, REG_NOTES (linsn));
	    }

	  /* For pointer data type, suggest pointer register.  */
	  if (POINTER_TYPE_P (TREE_TYPE (parm)))
	    mark_reg_pointer (parmreg,
			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

	  /* If something wants our address, try to use ADDRESSOF.  */
	  if (TREE_ADDRESSABLE (parm))
	    {
	      /* If we end up putting something into the stack,
		 fixup_var_refs_insns will need to make a pass over
		 all the instructions.  It looks through the pending
		 sequences -- but it can't see the ones in the
		 CONVERSION_INSNS, if they're not on the sequence
		 stack.  So, we go back to that sequence, just so that
		 the fixups will happen.  */
	      push_to_sequence (conversion_insns);
	      put_var_into_stack (parm, /*rescan=*/true);
	      conversion_insns = get_insns ();
	      end_sequence ();
	    }
	}
      else
	{
	  /* Value must be stored in the stack slot STACK_PARM
	     during function execution.  */

	  if (promoted_mode != nominal_mode)
	    {
	      /* Conversion is required.  */
	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

	      emit_move_insn (tempreg, validize_mem (entry_parm));

	      push_to_sequence (conversion_insns);
	      entry_parm = convert_to_mode (nominal_mode, tempreg,
					    TYPE_UNSIGNED (TREE_TYPE (parm)));
	      if (stack_parm)
		/* ??? This may need a big-endian conversion on sparc64.  */
		stack_parm = adjust_address (stack_parm, nominal_mode, 0);

	      conversion_insns = get_insns ();
	      did_conversion = 1;
	      end_sequence ();
	    }

	  if (entry_parm != stack_parm)
	    {
	      if (stack_parm == 0)
		{
		  stack_parm
		    = assign_stack_local (GET_MODE (entry_parm),
					  GET_MODE_SIZE (GET_MODE (entry_parm)),
					  0);
		  set_mem_attributes (stack_parm, parm, 1);
		}

	      if (promoted_mode != nominal_mode)
		{
		  push_to_sequence (conversion_insns);
		  emit_move_insn (validize_mem (stack_parm),
				  validize_mem (entry_parm));
		  conversion_insns = get_insns ();
		  end_sequence ();
		}
	      else
		emit_move_insn (validize_mem (stack_parm),
				validize_mem (entry_parm));
	    }

	  SET_DECL_RTL (parm, stack_parm);
	}
    }

  if (targetm.calls.split_complex_arg && fnargs != orig_fnargs)
    {
      for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
	{
	  if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	      && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	    {
	      rtx tmp, real, imag;
	      enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	      real = DECL_RTL (fnargs);
	      imag = DECL_RTL (TREE_CHAIN (fnargs));
	      if (inner != GET_MODE (real))
		{
		  real = gen_lowpart_SUBREG (inner, real);
		  imag = gen_lowpart_SUBREG (inner, imag);
		}
	      tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	      SET_DECL_RTL (parm, tmp);

	      real = DECL_INCOMING_RTL (fnargs);
	      imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
	      if (inner != GET_MODE (real))
		{
		  real = gen_lowpart_SUBREG (inner, real);
		  imag = gen_lowpart_SUBREG (inner, imag);
		}
	      tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	      set_decl_incoming_rtl (parm, tmp);
	      fnargs = TREE_CHAIN (fnargs);
	    }
	  else
	    {
	      SET_DECL_RTL (parm, DECL_RTL (fnargs));
	      set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));

	      /* Set MEM_EXPR to the original decl, i.e. to PARM,
		 instead of the copy of decl, i.e. FNARGS.  */
	      if (DECL_INCOMING_RTL (parm)
		  && GET_CODE (DECL_INCOMING_RTL (parm)) == MEM)
		set_mem_expr (DECL_INCOMING_RTL (parm), parm);
	    }
	  fnargs = TREE_CHAIN (fnargs);
	}
    }

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (conversion_insns);

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (function_result_decl)
    {
      tree result = DECL_RESULT (fndecl);
      rtx addr = DECL_RTL (function_result_decl);
      rtx x;

      addr = convert_memory_address (Pmode, addr);
      x = gen_rtx_MEM (DECL_MODE (result), addr);
      set_mem_attributes (x, result, 1);
      SET_DECL_RTL (result, x);
    }

  last_parm_insn = get_last_insn ();

  /* We have aligned all the args, so add space for the pretend args.  */
  stack_args_size.constant += extra_pretend_bytes;
  current_function_args_size = stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
				    REG_PARM_STACK_SPACE (fndecl));
#endif

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;

#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
       : expand_expr (size_diffop (stack_args_size.var,
				   size_int (-stack_args_size.constant)),
		      NULL_RTX, VOIDmode, 0));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
						 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  current_function_args_info = args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

#ifdef FUNCTION_OUTGOING_VALUE
	  real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
						   fndecl);
#else
	  real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
					  fndecl);
#endif
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that current_function_return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  current_function_return_rtx = real_decl_rtl;
	}
    }
}
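
/* Worked example (illustrative only; guarded out with #if 0) of the
   pretend-args rounding done in assign_parms above: a partial argument
   occupying PARTIAL registers needs PRETEND_BYTES = PARTIAL *
   UNITS_PER_WORD of prologue-allocated stack, rounded up to STACK_BYTES
   with CEIL_ROUND so the argument pointer keeps its STACK_BOUNDARY
   alignment.  The constants below are assumptions for the example, not
   taken from any particular target.  */
#if 0
#include <stdio.h>

#define PARTIAL_EX 3          /* hypothetical partial register count */
#define UNITS_PER_WORD_EX 4   /* hypothetical word size in bytes */
#define STACK_BYTES_EX 16     /* hypothetical STACK_BOUNDARY / 8 */
#define CEIL_ROUND_EX(v, a) (((v) + (a) - 1) & ~((a) - 1))

int
main (void)
{
  int pretend_bytes = PARTIAL_EX * UNITS_PER_WORD_EX;  /* 12 */
  int pretend_args_size = CEIL_ROUND_EX (pretend_bytes, STACK_BYTES_EX);

  /* Prints 12 -> 16: four bytes of padding preserve the invariant.  */
  printf ("%d -> %d\n", pretend_bytes, pretend_args_size);
  return 0;
}
#endif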
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static tree
split_complex_args (tree args)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }
  return args;

 found:
  args = copy_list (args);

  for (p = args; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  layout_decl (p, 0);

	  /* Build a second synthetic decl.  */
	  decl = build_decl (PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  layout_decl (decl, 0);

	  /* Splice it in; skip the new decl.  */
	  TREE_CHAIN (decl) = TREE_CHAIN (p);
	  TREE_CHAIN (p) = decl;
	  p = decl;
	}
    }

  return args;
}
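
/* An illustrative sketch (not GCC code; guarded out with #if 0) of the
   list surgery done by split_complex_args: each complex entry is
   narrowed to its component type and a second synthetic entry is spliced
   in right after it, then skipped so it is not split again.  The struct
   here is hypothetical.  */
#if 0
#include <stdlib.h>

struct parm_sketch { int is_complex; struct parm_sketch *next; };

static void
split_complex_sketch (struct parm_sketch *p)
{
  for (; p; p = p->next)
    if (p->is_complex)
      {
	struct parm_sketch *decl = malloc (sizeof *decl);
	p->is_complex = 0;      /* rewrite entry as the component */
	decl->is_complex = 0;   /* second synthetic component */
	decl->next = p->next;   /* splice it in ... */
	p->next = decl;
	p = decl;               /* ... and skip it */
      }
}
#endif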
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

rtx
promoted_input_arg (unsigned int regno, enum machine_mode *pmode,
		    int *punsignedp)
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
	int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));

	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
	    && mode != DECL_MODE (arg))
	  {
	    *pmode = DECL_MODE (arg);
	    *punsignedp = unsignedp;
	    return DECL_INCOMING_RTL (arg);
	  }
      }

  return 0;
}
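
/* An illustrative sketch (not GCC code; guarded out with #if 0) of the
   promotion that promoted_input_arg reports: a narrow integer argument
   arrives sign- or zero-extended to a wider register mode, and callers
   need both the original mode and the signedness of the extension.  The
   QImode-to-SImode promotion is an assumption for the example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* Hypothetical target: 8-bit args are promoted to 32-bit registers.  */
  signed char qi = -5;
  unsigned char uqi = 0xFB;
  int promoted_signed = qi;          /* sign-extended: 0xfffffffb */
  unsigned promoted_unsigned = uqi;  /* zero-extended: 0x000000fb */

  printf ("%08x %08x\n", (unsigned) promoted_signed, promoted_unsigned);
  return 0;
}
#endif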
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specific boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
		     int partial, tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  int boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = 0;
  if (reg_parm_stack_space == 0)
    part_size_in_regs = ((partial * UNITS_PER_WORD)
			 / (PARM_BOUNDARY / BITS_PER_UNIT)
			 * (PARM_BOUNDARY / BITS_PER_UNIT));

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
  locate->where_pad = where_pad;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					  initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
	&& (!host_integerp (sizetree, 1)
	    || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
			  &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
			   - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
				   size_binop (MINUS_EXPR,
					       ssize_int (0),
					       initial_offset_ptr->var),
				   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
			  &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */
}
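
/* A worked example (illustrative; guarded out with #if 0) of the two
   roundings described above, for the !ARGS_GROW_DOWNWARD path with
   assumed boundaries: first the running offset is aligned to
   FUNCTION_ARG_BOUNDARY, then the size is rounded up to PARM_BOUNDARY;
   the slot offset feels only the first rounding.  */
#if 0
#include <stdio.h>

#define CEIL_ROUND_EX(v, a) (((v) + (a) - 1) & ~((a) - 1))

int
main (void)
{
  int offset = 10;        /* bytes of args so far (assumed) */
  int arg_boundary = 8;   /* FUNCTION_ARG_BOUNDARY / 8 (assumed) */
  int parm_boundary = 4;  /* PARM_BOUNDARY / 8 (assumed) */
  int size = 6;           /* raw argument size in bytes (assumed) */

  int slot_offset = CEIL_ROUND_EX (offset, arg_boundary);  /* 16 */
  int padded_size = CEIL_ROUND_EX (size, parm_boundary);   /* 8 */

  printf ("slot at %d, consuming %d bytes\n", slot_offset, padded_size);
  return 0;
}
#endif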
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* The sparc port has a bug.  It sometimes claims a STACK_BOUNDARY
     higher than the real alignment of %sp.  However, when it does this,
     the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
     This is a temporary hack while the sparc port is fixed.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
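
/* Worked example (illustrative; guarded out with #if 0) of the
   STACK_POINTER_OFFSET bias in pad_to_arg_alignment: the offset is
   aligned relative to sp + bias, then the bias is subtracted back out,
   matching offset = -sp_offset + CEIL_ROUND (offset + sp_offset,
   boundary).  The numbers are assumed.  */
#if 0
#include <stdio.h>

#define CEIL_ROUND_EX(v, a) (((v) + (a) - 1) & ~((a) - 1))

int
main (void)
{
  int sp_offset = 4;  /* assumed STACK_POINTER_OFFSET */
  int offset = 10;    /* current offset (assumed) */
  int boundary = 8;   /* alignment in bytes (assumed) */

  int aligned = -sp_offset + CEIL_ROUND_EX (offset + sp_offset, boundary);

  /* 10 + 4 = 14 rounds up to 16, so the offset becomes 12:
     sp_offset + 12 = 16 is a multiple of the boundary.  */
  printf ("%d -> %d\n", offset, aligned);
  return 0;
}
#endif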
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode,
	   tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
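
/* Worked example (illustrative; guarded out with #if 0) of the
   non-BLKmode branch of pad_below: a value whose mode is smaller than
   PARM_BOUNDARY sits at the top of its rounded slot, so the offset moves
   up by (rounded size - actual size).  The numbers are assumed.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int mode_bits = 16;      /* e.g. a 16-bit (HImode-sized) value, assumed */
  int parm_boundary = 32;  /* PARM_BOUNDARY in bits (assumed) */
  int bits_per_unit = 8;

  int mode_size = mode_bits / bits_per_unit;                          /* 2 */
  int slot_size = ((mode_bits + parm_boundary - 1)
		   / parm_boundary * parm_boundary) / bits_per_unit;  /* 4 */

  printf ("pad below by %d bytes\n", slot_size - mode_size);          /* 2 */
  return 0;
}
#endif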
/* Walk the tree of blocks describing the binding levels within a function
   and warn about variables that might be killed by setjmp or vfork.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
setjmp_vars_warning (tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL_SET_P (decl)
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
		 decl, decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_vars_warning (sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning (void)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
	       decl, decl);
}
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */

void
setjmp_protect (tree block)
{
  tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& (GET_CODE (DECL_RTL (decl)) == REG
	    || (GET_CODE (DECL_RTL (decl)) == MEM
		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	/* If this variable came from an inline function, it must be
	   that its life doesn't overlap the setjmp.  If there was a
	   setjmp in the function, it would already be in memory.  We
	   must exclude such variables because their DECL_RTL might be
	   set to strange things such as virtual_stack_vars_rtx.  */
	&& ! DECL_FROM_INLINE (decl)
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl, /*rescan=*/true);
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
/* Like the previous function, but for args instead of local variables.  */

void
setjmp_protect_args (void)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& (GET_CODE (DECL_RTL (decl)) == REG
	    || (GET_CODE (DECL_RTL (decl)) == MEM
		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
	&& (
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl, /*rescan=*/true);
}
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).  */

rtx
fix_lexical_addr (rtx addr, tree var)
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl)
    return addr;

  fp = find_function_data (context);

  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
    addr = XEXP (XEXP (addr, 0), 0);

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
/* Put all this function's BLOCK nodes including those that are chained
   onto the first block into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are BLOCK, the chain of top-level blocks of the function,
   and INSNS, the insn chain of the function.  */

void
identify_blocks (void)
{
  int n_blocks;
  tree *block_vector, *last_block_vector;
  tree *block_stack;
  tree block = DECL_INITIAL (current_function_decl);

  if (block == 0)
    return;

  /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
     depth-first order.  */
  block_vector = get_block_vector (block, &n_blocks);
  block_stack = xmalloc (n_blocks * sizeof (tree));

  last_block_vector = identify_blocks_1 (get_insns (),
					 block_vector + 1,
					 block_vector + n_blocks,
					 block_stack);

  /* If we didn't use all of the subblocks, we've misplaced block notes.  */
  /* ??? This appears to happen all the time.  Latent bugs elsewhere?  */
  if (0 && last_block_vector != block_vector + n_blocks)
    abort ();

  free (block_vector);
  free (block_stack);
}
/* Subroutine of identify_blocks.  Do the block substitution on the
   insn chain beginning with INSNS.  Recurse for CALL_PLACEHOLDER chains.

   BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
   BLOCK_VECTOR is incremented for each block seen.  */

static tree *
identify_blocks_1 (rtx insns, tree *block_vector, tree *end_block_vector,
		   tree *orig_block_stack)
{
  rtx insn;
  tree *block_stack = orig_block_stack;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree b;

	      /* If there are more block notes than BLOCKs, something
		 is badly wrong.  */
	      if (block_vector == end_block_vector)
		abort ();

	      b = *block_vector++;
	      NOTE_BLOCK (insn) = b;
	      *block_stack++ = b;
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      /* If there are more NOTE_INSN_BLOCK_ENDs than
		 NOTE_INSN_BLOCK_BEGs, something is badly wrong.  */
	      if (block_stack == orig_block_stack)
		abort ();

	      NOTE_BLOCK (insn) = *--block_stack;
	    }
	}
      else if (GET_CODE (insn) == CALL_INSN
	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  rtx cp = PATTERN (insn);

	  block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
					    end_block_vector, block_stack);
	  if (XEXP (cp, 1))
	    block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
					      end_block_vector, block_stack);
	  if (XEXP (cp, 2))
	    block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
					      end_block_vector, block_stack);
	}
    }

  /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
     something is badly wrong.  */
  if (block_stack != orig_block_stack)
    abort ();

  return block_vector;
}

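/* Illustration (not part of GCC): the push/pop discipline above matches
   block notes exactly the way one matches parentheses.  A hypothetical
   stand-alone sketch of the same idea:  */
#if 0
/* Return 1 iff the '(' and ')' characters in S are properly nested,
   mirroring how BLOCK_BEG pushes and BLOCK_END pops block_stack.  */
static int
well_nested (const char *s)
{
  int depth = 0;
  for (; *s; s++)
    {
      if (*s == '(')
	depth++;			/* NOTE_INSN_BLOCK_BEG: push */
      else if (*s == ')' && depth-- == 0)
	return 0;			/* more ENDs than BEGs: badly wrong */
    }
  return depth == 0;			/* leftover BEGs: badly wrong */
}
#endif
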
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  varray_type block_stack;

  if (block == NULL_TREE)
    return;

  VARRAY_TREE_INIT (block_stack, 10, "block_stack");

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* Remove deleted blocks from the block fragment chains.  */
  reorder_fix_fragments (block);
}

/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}

static void
reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);
		  tree origin;

		  origin = (BLOCK_FRAGMENT_ORIGIN (block)
			    ? BLOCK_FRAGMENT_ORIGIN (block)
			    : block);
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  BLOCK_SUPERCONTEXT (block) = current_block;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = block;
		}
	      VARRAY_PUSH_TREE (*p_block_stack, block);
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
	      VARRAY_POP (*p_block_stack);
	      BLOCK_SUBBLOCKS (current_block)
		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	    }
	}
      else if (GET_CODE (insn) == CALL_INSN
	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  rtx cp = PATTERN (insn);
	  reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
	  if (XEXP (cp, 1))
	    reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
	  if (XEXP (cp, 2))
	    reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
	}
    }
}

/* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
   appears in the block tree, select one of the fragments to become
   the new origin block.  */

static void
reorder_fix_fragments (tree block)
{
  while (block)
    {
      tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
      tree new_origin = NULL_TREE;

      if (dup_origin)
	{
	  if (! TREE_ASM_WRITTEN (dup_origin))
	    {
	      new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);

	      /* Find the first of the remaining fragments.  There must
		 be at least one -- the current block.  */
	      while (! TREE_ASM_WRITTEN (new_origin))
		new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
	      BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
	    }
	}
      else if (! dup_origin)
	new_origin = block;

      /* Re-root the rest of the fragments to the new origin.  In the
	 case that DUP_ORIGIN was null, that means BLOCK was the origin
	 of a chain of fragments and we want to remove those fragments
	 that didn't make it to the output.  */
      if (new_origin)
	{
	  tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
	  tree chain = *pp;

	  while (chain)
	    {
	      if (TREE_ASM_WRITTEN (chain))
		{
		  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
		  *pp = chain;
		  pp = &BLOCK_FRAGMENT_CHAIN (chain);
		}
	      chain = BLOCK_FRAGMENT_CHAIN (chain);
	    }
	  *pp = NULL_TREE;
	}

      reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}

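/* Illustration (not part of GCC): the same three-pointer in-place reversal,
   written for a hypothetical singly linked list; BLOCK_CHAIN plays the role
   of `next' above.  */
#if 0
struct node { struct node *next; };

static struct node *
nreverse (struct node *t)
{
  struct node *prev = 0, *cur, *next;
  for (cur = t; cur; cur = next)
    {
      next = cur->next;		/* save the rest of the chain */
      cur->next = prev;		/* point this node backwards */
      prev = cur;		/* advance the reversed head */
    }
  return prev;			/* old last element is the new head */
}
#endif
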
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}

/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = xmalloc (*n_blocks_p * sizeof (tree));
  all_blocks (block, block_vector);

  return block_vector;
}

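/* Illustration (not part of GCC): typical use of get_block_vector, as in
   number_blocks below -- the caller owns the vector and must free it.  */
#if 0
{
  int n_blocks;
  tree *vec = get_block_vector (block, &n_blocks);
  /* ... inspect vec[0] .. vec[n_blocks - 1] in depth-first preorder ... */
  free (vec);
}
#endif
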
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}

/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}

/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  */

void
allocate_struct_function (tree fndecl)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared (sizeof (struct function));

  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  cfun->stack_alignment_needed = STACK_BOUNDARY;
  cfun->preferred_stack_boundary = STACK_BOUNDARY;

  current_function_funcdef_no = funcdef_no++;

  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

  init_stmt_for_function ();
  init_eh_for_function ();

  lang_hooks.function.init (cfun);
  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

  if (fndecl == NULL)
    return;

  DECL_STRUCT_FUNCTION (fndecl) = cfun;
  cfun->decl = fndecl;

  result = DECL_RESULT (fndecl);
  if (aggregate_value_p (result, fndecl))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));

  current_function_stdarg
    = (fntype
       && TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   != void_type_node));
}

/* Reset cfun, and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (tree fndecl)
{
  if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
    cfun = DECL_STRUCT_FUNCTION (fndecl);
  else
    allocate_struct_function (fndecl);
  init_emit ();
  init_varasm_status (cfun);
  init_expr ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}

/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */

void
init_dummy_function_start (void)
{
  prepare_function_start (NULL);
}

/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  prepare_function_start (subr);

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* Prevent ever trying to delete the first instruction of a
     function.  Also tell final how to output a linenum before the
     function prologue.  Note linenums could be missing, e.g. when
     compiling a Java .class file.  */
  if (DECL_SOURCE_LINE (subr))
    emit_line_note (DECL_SOURCE_LOCATION (subr));

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");
}

/* Make sure all values used by the optimization passes have sane
   defaults.  */

void
init_function_for_compilation (void)
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  */
  VARRAY_GROW (prologue, 0);
  VARRAY_GROW (epilogue, 0);
  VARRAY_GROW (sibcall_epilogue, 0);
}

/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function (void)
{
#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
  if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      rtx tmp, seq;

      start_sequence ();
      /* Forcibly align the stack.  */
#ifdef STACK_GROWS_DOWNWARD
      tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx,
				 GEN_INT (-align),
				 stack_pointer_rtx, 1, OPTAB_WIDEN);
#else
      tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
				 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
      tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
				 stack_pointer_rtx, 1, OPTAB_WIDEN);
#endif
      if (tmp != stack_pointer_rtx)
	emit_move_insn (stack_pointer_rtx, tmp);

      /* Enlist allocate_dynamic_stack_space to pick up the pieces.  */
      tmp = force_reg (Pmode, const0_rtx);
      allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
      seq = get_insns ();
      end_sequence ();

      for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
	if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
	  break;
      if (tmp)
	emit_insn_before (seq, tmp);
      else
	emit_insn (seq);
    }
#endif

#ifndef HAS_INIT_SECTION
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}

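/* Illustration (not part of GCC): the mask arithmetic used above.  With a
   downward-growing stack, `sp & -align' rounds sp down to a multiple of
   align; otherwise `(sp + align - 1) & -align' rounds it up.  A hypothetical
   scalar version, assuming align is a power of two:  */
#if 0
unsigned long
align_down (unsigned long sp, unsigned long align)
{
  return sp & -align;			/* e.g. 0x1007 -> 0x1000 for align 16 */
}

unsigned long
align_up (unsigned long sp, unsigned long align)
{
  return (sp + align - 1) & -align;	/* e.g. 0x1001 -> 0x1010 for align 16 */
}
#endif
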
/* The PENDING_SIZES represent the sizes of variable-sized types.
   Create RTL for the various sizes now (using temporary variables),
   so that we can refer to the sizes from the RTL we are generating
   for the current function.  The PENDING_SIZES are a TREE_LIST.  The
   TREE_VALUE of each node is a SAVE_EXPR.  */

static void
expand_pending_sizes (tree pending_sizes)
{
  tree tem;

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
      /* Flush the queue in case this parameter declaration has
	 side effects.  */
      emit_queue ();
    }
}

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr, int parms_have_cleanups)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  current_function_profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  set_mem_attributes (x, DECL_RESULT (subr), 1);
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */

      /* In order to figure out what mode to use for the pseudo, we
	 figure out what the mode of the eventual return register will
	 actually be, and use that.  */
      rtx hard_reg
	= hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
			       subr, 1);

      /* Structures that are returned in registers are not aggregate_value_p,
	 so we may see a PARALLEL or a REG.  */
      if (REG_P (hard_reg))
	SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
      else if (GET_CODE (hard_reg) == PARALLEL)
	SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
      else
	abort ();

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local = gen_reg_rtx (Pmode);

      set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
      SET_DECL_RTL (parm, local);
      maybe_set_unchanging (local, parm);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      emit_move_insn (local, static_chain_incoming_rtx);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
	 before the frame variable gets declared.  Help out...  */
      expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));

      t_save = build (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
		      integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

      emit_move_insn (r_save, virtual_stack_vars_rtx);
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (current_function_profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  expand_pending_sizes (nreverse (get_pending_sizes ()));

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  cfun = 0;
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = current_function_return_rtx;

  if (! outgoing)
    return;

  if (GET_CODE (outgoing) == REG)
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we use a pseudo to return the value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_insn (gen_rtx_USE (VOIDmode, reg));
}

void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}

/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = TREE_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
      warning ("%Junused parameter '%D'", decl, decl);
}

static GTY(()) rtx initial_trampoline;

/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  finish_expr_for_function ();

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
    get_arg_pointer_save_area (cfun);

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insn_before (seq, tail_recursion_reentry);
	    break;
	  }
    }

  /* Possibly warn about unused parameters.
     When the frontend does unit-at-a-time, the warning is already
     issued at finalization time.  */
  if (warn_unused_parameter
      && !lang_hooks.callgraph.expand_function)
    do_warn_unused_parameter (current_function_decl);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.
     However, not all machine descriptions define a blockage insn, so
     emit an ASM_INPUT to act as one.  */
  if (flag_non_call_exceptions)
    emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  emit_line_note (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation until after the
     current_function_value_rtx is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */
  if (return_label)
    emit_label (return_label);

  /* Let except.c know where it should emit the call to unregister
     the function context for sjlj exceptions.  */
  if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
    sjlj_emit_function_exit_after (get_last_insn ());

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && current_function_calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = current_function_return_rtx;

	  /* This should be set in assign_parms.  */
	  if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
	    abort ();

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that current_function_return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

	      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
		promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
			      &unsignedp, 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address
	= XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

#ifdef FUNCTION_OUTGOING_VALUE
      outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      outgoing
	= FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      current_function_return_rtx = outgoing;
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq, after;

    start_sequence ();
    clobber_return_register ();
    seq = get_insns ();
    end_sequence ();

    after = emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function, if one is
     expected.  This is currently used only by __builtin_return.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}

rtx
get_arg_pointer_save_area (struct function *f)
{
  rtx ret = f->x_arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
      f->x_arg_pointer_save_area = ret;
    }

  if (f == cfun && ! f->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, get_insns ());
      pop_topmost_sequence ();
    }

  return ret;
}

/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (rtx insns, varray_type *vecp)
{
  int i, len;
  rtx tmp;

  tmp = insns;
  len = 0;
  while (tmp != NULL_RTX)
    {
      len++;
      tmp = NEXT_INSN (tmp);
    }

  i = VARRAY_SIZE (*vecp);
  VARRAY_GROW (*vecp, i + len);
  tmp = insns;
  while (tmp != NULL_RTX)
    {
      VARRAY_INT (*vecp, i) = INSN_UID (tmp);
      i++;
      tmp = NEXT_INSN (tmp);
    }
}

/* Set the locator of the insn chain starting at INSN to LOC.  */
static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
	INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}

/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (rtx insn, varray_type vec)
{
  int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
	    count++;
      return count;
    }
  else
    {
      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
	if (INSN_UID (insn) == VARRAY_INT (vec, j))
	  return 1;
    }
  return 0;
}

int
prologue_epilogue_contains (rtx insn)
{
  if (contains (insn, prologue))
    return 1;
  if (contains (insn, epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (rtx insn)
{
  if (sibcall_epilogue)
    return contains (insn, sibcall_epilogue);
  return 0;
}

#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb, rtx line_note)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
  if (line_note)
    emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
}
#endif /* HAVE_return */

#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)

/* These functions convert the epilogue into a variant that does not modify the
   stack pointer.  This is used in cases where a function returns an object
   whose size is not known until it is computed.  The called function leaves the
   object on the stack, leaves the stack depressed, and returns a pointer to
   the object.

   What we need to do is track all modifications and references to the stack
   pointer, deleting the modifications and changing the references to point to
   the location the stack pointer would have pointed to had the modifications
   taken place.

   These functions need to be portable so we need to make as few assumptions
   about the epilogue as we can.  However, the epilogue basically contains
   three things: instructions to reset the stack pointer, instructions to
   reload registers, possibly including the frame pointer, and an
   instruction to return to the caller.

   If we can't be sure of what a relevant epilogue insn is doing, we abort.
   We also make no attempt to validate the insns we make since if they are
   invalid, we probably can't do anything valid.  The intent is that these
   routines get "smarter" as more and more machines start to use them and
   they try operating on different epilogues.

   We use the following structure to track what the part of the epilogue that
   we've already processed has done.  We keep two copies of the SP equivalence,
   one for use during the insn we are processing and one for use in the next
   insn.  The difference is because one part of a PARALLEL may adjust SP
   and the other may use it.  */

struct epi_info
{
  rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
  rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
				   should be set to once we no longer need
				   its value.  */
  rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
					     for registers.  */
};

static void handle_epilogue_set (rtx, struct epi_info *);
static void update_epilogue_consts (rtx, rtx, void *);
static void emit_equiv_load (struct epi_info *);

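/* Illustration (not part of GCC): a worked example of this tracking.  For a
   hypothetical epilogue of the form

	sp = sp + 16			(pop the frame)
	r1 = mem[sp - 4]		(reload a register)
	return

   the first insn is dropped and recorded as sp_equiv_reg = sp with
   sp_offset = 16, so the reload is rewritten as r1 = mem[sp + 12] and the
   stack pointer itself is never modified.  */
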
/* Modify INSN, a list of one or more insns that is part of the epilogue, so
   that it makes no modifications to the stack pointer.  Return the new list
   of insns.  */

static rtx
keep_stack_depressed (rtx insns)
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */
  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;

  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
    info.const_equiv[j] = 0;

  insn = insns;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
	{
	  add_insn (insn);
	  insn = next;
	  continue;
	}

      /* If this insn references the register that SP is equivalent to and
	 we have a pending load to that register, we must force out the load
	 first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
	{
	  emit_equiv_load (&info);
	  info.sp_equiv_reg = 0;
	}

      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;

      /* If this is a (RETURN) and the return address is on the stack,
	 update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	{
	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
	  rtx base = 0;
	  HOST_WIDE_INT offset = 0;
	  rtx jump_insn, jump_set;

	  /* If the return address is in a register, we can emit the insn
	     unchanged.  Otherwise, it must be a MEM and we see what the
	     base register and offset are.  In any case, we have to emit any
	     pending load to the equivalent reg of SP, if any.  */
	  if (GET_CODE (retaddr) == REG)
	    {
	      emit_equiv_load (&info);
	      add_insn (insn);
	      insn = next;
	      continue;
	    }
	  else if (GET_CODE (retaddr) == MEM
		   && GET_CODE (XEXP (retaddr, 0)) == REG)
	    base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
	  else if (GET_CODE (retaddr) == MEM
		   && GET_CODE (XEXP (retaddr, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
		   && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
	    {
	      base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
	      offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
	    }
	  else
	    abort ();

	  /* If the base of the location containing the return pointer
	     is SP, we must update it with the replacement address.  Otherwise,
	     just build the necessary MEM.  */
	  retaddr = plus_constant (base, offset);
	  if (base == stack_pointer_rtx)
	    retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
					    plus_constant (info.sp_equiv_reg,
							   info.sp_offset));

	  retaddr = gen_rtx_MEM (Pmode, retaddr);

	  /* If there is a pending load to the equivalent register for SP
	     and we reference that register, we must load our address into
	     a scratch register and then do that load.  */
	  if (info.equiv_reg_src
	      && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
	    {
	      unsigned int regno;
	      rtx reg;

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (HARD_REGNO_MODE_OK (regno, Pmode)
		    && !fixed_regs[regno]
		    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
		    && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
					 regno)
		    && !refers_to_regno_p (regno,
					   regno + hard_regno_nregs[regno]
								   [Pmode],
					   info.equiv_reg_src, NULL)
		    && info.const_equiv[regno] == 0)
		  break;

	      if (regno == FIRST_PSEUDO_REGISTER)
		abort ();

	      reg = gen_rtx_REG (Pmode, regno);
	      emit_move_insn (reg, retaddr);
	      retaddr = reg;
	    }

	  emit_equiv_load (&info);
	  jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

	  /* Show the SET in the above insn is a RETURN.  */
	  jump_set = single_set (jump_insn);
	  if (jump_set == 0)
	    abort ();
	  else
	    SET_IS_RETURN_P (jump_set) = 1;
	}

      /* If SP is not mentioned in the pattern and its equivalent register, if
	 any, is not modified, just emit it.  Otherwise, if neither is set,
	 replace the reference to SP and emit the insn.  If none of those are
	 true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	{
	  if (! validate_replace_rtx (stack_pointer_rtx,
				      plus_constant (info.sp_equiv_reg,
						     info.sp_offset),
				      insn))
	    abort ();

	  add_insn (insn);
	}
      else if (GET_CODE (PATTERN (insn)) == SET)
	handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	{
	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
	}
      else
	add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      /* Now update any constants this insn sets.  */
      note_stores (PATTERN (insn), update_epilogue_consts, &info);
      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}

/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (rtx set, struct epi_info *p)
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
	abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	    p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	  else if (GET_CODE (XEXP (SET_SRC (set), 1)) == REG
		   && REGNO (XEXP (SET_SRC (set), 1)) < FIRST_PSEUDO_REGISTER
		   && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))] != 0)
	    p->new_sp_offset
	      = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
	  else
	    abort ();
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
	abort ();
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  We must allow for the case
     where the register is being set in a different (usually wider) mode than
     Pmode.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      if (p->equiv_reg_src != 0
	  || GET_CODE (p->new_sp_equiv_reg) != REG
	  || GET_CODE (SET_DEST (set)) != REG
	  || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) > BITS_PER_WORD
	  || REGNO (p->new_sp_equiv_reg) != REGNO (SET_DEST (set)))
	abort ();
      else
	p->equiv_reg_src
	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				  plus_constant (p->sp_equiv_reg,
						 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn to its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}

7417 update_epilogue_consts (rtx dest
, rtx x
, void *data
)
7419 struct epi_info
*p
= (struct epi_info
*) data
;
7422 if (GET_CODE (dest
) != REG
|| REGNO (dest
) >= FIRST_PSEUDO_REGISTER
)
7425 /* If we are either clobbering a register or doing a partial set,
7426 show we don't know the value. */
7427 else if (GET_CODE (x
) == CLOBBER
|| ! rtx_equal_p (dest
, SET_DEST (x
)))
7428 p
->const_equiv
[REGNO (dest
)] = 0;
7430 /* If we are setting it to a constant, record that constant. */
7431 else if (GET_CODE (SET_SRC (x
)) == CONST_INT
)
7432 p
->const_equiv
[REGNO (dest
)] = SET_SRC (x
);
7434 /* If this is a binary operation between a register we have been tracking
7435 and a constant, see if we can compute a new constant value. */
7436 else if (ARITHMETIC_P (SET_SRC (x
))
7437 && GET_CODE (XEXP (SET_SRC (x
), 0)) == REG
7438 && REGNO (XEXP (SET_SRC (x
), 0)) < FIRST_PSEUDO_REGISTER
7439 && p
->const_equiv
[REGNO (XEXP (SET_SRC (x
), 0))] != 0
7440 && GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
7441 && 0 != (new = simplify_binary_operation
7442 (GET_CODE (SET_SRC (x
)), GET_MODE (dest
),
7443 p
->const_equiv
[REGNO (XEXP (SET_SRC (x
), 0))],
7444 XEXP (SET_SRC (x
), 1)))
7445 && GET_CODE (new) == CONST_INT
)
7446 p
->const_equiv
[REGNO (dest
)] = new;
7448 /* Otherwise, we can't do anything with this value. */
7450 p
->const_equiv
[REGNO (dest
)] = 0;
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (struct epi_info *p)
{
  if (p->equiv_reg_src != 0)
    {
      rtx dest = p->sp_equiv_reg;

      if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
	dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
			    REGNO (p->sp_equiv_reg));

      emit_move_insn (dest, p->equiv_reg_src);
      p->equiv_reg_src = 0;
    }
}
#endif

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      /* Can't deal with multiple successors of the entry block
	 at the moment.  Function should always have at least one
	 entry point.  */
      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
	abort ();

      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && GET_CODE (label) != CODE_LABEL)
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
	{
	  rtx epilogue_line_note = NULL_RTX;

	  /* Locate the line number associated with the closing brace,
	     if we can find one.  */
	  for (seq = get_last_insn ();
	       seq && ! active_insn_p (seq);
	       seq = PREV_INSN (seq))
	    if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
	      {
		epilogue_line_note = seq;
		break;
	      }

	  for (e = last->pred; e; e = e_next)
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      e_next = e->pred_next;
	      if (bb == ENTRY_BLOCK_PTR)
		continue;

	      jump = BB_END (bb);
	      if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
		continue;

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb, epilogue_line_note);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  if (! redirect_jump (jump, 0, 0))
		    continue;

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (bb->succ->succ_next == NULL)
		    continue;
		}
	      else
		continue;

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (BB_END (last));
	  emit_return_into_block (last, epilogue_line_note);
	  epilogue_end = BB_END (last);
	  last->succ->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
	 due to RETURN instructions, but those don't need epilogues.
	 There really shouldn't be a mixture -- either all should have
	 been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (e == NULL)
	goto epilogue_done;

      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
epilogue_done:

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx i;
      rtx newinsn;

      if (GET_CODE (insn) != CALL_INSN
	  || ! SIBLING_CALL_P (insn))
	continue;

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);
    }
#endif

#ifdef HAVE_prologue
  /* This is probably all useless now that we use locators.  */
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note after the prologue.  Which means (1) that if
	 there are line number notes before where we inserted the
	 prologue we should move them, and (2) we should generate a
	 note before the end of the first basic block, if there isn't
	 one already there.

	 ??? This behavior is completely broken when dealing with
	 multiple entry functions.  We simply place the note always
	 into the first basic block and let alternate entry points
	 be missed.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  */
	      if (prev == NULL)
		break;
	      reorder_insns (insn, insn, prologue_end);
	    }
	}

      /* Find the last line number note in the first block.  */
      for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
	   insn != prologue_end && insn;
	   insn = PREV_INSN (insn))
	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	  break;

      /* If we didn't find one, make a copy of the first line number
	 we run across.  */
      if (! insn)
	for (insn = next_active_insn (prologue_end);
	     insn;
	     insn = PREV_INSN (insn))
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      emit_note_copy_after (insn, prologue_end);
	      break;
	    }
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) > 0
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the prologue-end note if we haven't already, and
	     move it to just after the last prologue insn.  */
	  if (note == 0)
	    {
	      for (note = last; (note = NEXT_INSN (note));)
		if (GET_CODE (note) == NOTE
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		  break;
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (GET_CODE (last) == CODE_LABEL)
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE)
	    {
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if (contains (insn, epilogue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the epilogue-begin note if we haven't already, and
	     move it to just before the first epilogue insn.  */
	  if (note == 0)
	    {
	      for (note = insn; (note = PREV_INSN (note));)
		if (GET_CODE (note) == NOTE
		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		  break;
	    }

	  if (PREV_INSN (last) != note)
	    reorder_insns (note, note, PREV_INSN (last));
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Called once, at initialization, to initialize function.c.  */

void
init_function_once (void)
{
  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}

/* Resets insn_block_boundaries array.  */

void
reset_block_changes (void)
{
  VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
}

/* Record the boundary for BLOCK.  */
void
record_block_change (tree block)
{
  int i, n;
  tree last_block;

  if (!block)
    return;

  last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
  VARRAY_POP (cfun->ib_boundaries_block);
  n = get_max_uid ();
  for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
    VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);

  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
}

/* Finishes record of boundaries.  */
void
finalize_block_changes (void)
{
  record_block_change (DECL_INITIAL (current_function_decl));
}

/* For INSN return the BLOCK it belongs to.  */
void
check_block_change (rtx insn, tree *block)
{
  unsigned uid = INSN_UID (insn);

  if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
    return;

  *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
}

/* Releases the ib_boundaries_block records.  */
void
free_block_changes (void)
{
  cfun->ib_boundaries_block = NULL;
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

#include "gt-function.h"