/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
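
/* Illustrative sketch (not part of the original file): a front end that
   needs a word of scratch stack space during expansion would typically do

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
       emit_move_insn (slot, value);

   and, if a variable held in a pseudo later turns out to need a home in
   memory, call

       put_var_into_stack (decl, /*rescan=*/1);

   which rewrites DECL_RTL and fixes up the insns already emitted.  */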
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "integrate.h"
#include "langhooks.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the required
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
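
/* Worked example (not in the original sources): with ALIGN == 8,

       FLOOR_ROUND (-5, 8) == -8     and     CEIL_ROUND (13, 8) == 16.

   The bit masks give the same result as division-based rounding would,
   but stay well defined when the offset being rounded is negative.  */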
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
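
/* Illustrative sketch (not part of the original file): a typical
   statement expansion brackets its scratch allocations like

       push_temp_slots ();
       tem = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
       ... emit insns that use TEM ...
       preserve_temp_slots (result);
       free_temp_slots ();
       pop_temp_slots ();

   so temporaries that were not preserved become reusable again at the
   matching pop.  */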
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, unsigned int, int, htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
					     enum machine_mode, htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
				    htab_t));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
					  int, int, rtx));
static void fixup_var_refs_insns_with_hash
  PARAMS ((htab_t, rtx, enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
					 int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
					     int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
static void emit_return_into_block PARAMS ((basic_block, rtx));
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int, int, htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
static void instantiate_virtual_regs_lossage PARAMS ((rtx));
/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* List of insns that were postponed by purge_addressof_1.  */
static rtx postponed_insns;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */
329 push_function_context_to (context
)
336 if (context
== current_function_decl
)
337 cfun
->contains_functions
= 1;
340 struct function
*containing
= find_function_data (context
);
341 containing
->contains_functions
= 1;
346 init_dummy_function_start ();
349 p
->outer
= outer_function_chain
;
350 outer_function_chain
= p
;
351 p
->fixup_var_refs_queue
= 0;
353 (*lang_hooks
.function
.enter_nested
) (p
);
359 push_function_context ()
361 push_function_context_to (current_function_decl
);
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */
368 pop_function_context_from (context
)
369 tree context ATTRIBUTE_UNUSED
;
371 struct function
*p
= outer_function_chain
;
372 struct var_refs_queue
*queue
;
375 outer_function_chain
= p
->outer
;
377 current_function_decl
= p
->decl
;
380 restore_emit_status (p
);
382 (*lang_hooks
.function
.leave_nested
) (p
);
384 /* Finish doing put_var_into_stack for any of our variables which became
385 addressable during the nested function. If only one entry has to be
386 fixed up, just do that one. Otherwise, first make a list of MEMs that
387 are not to be unshared. */
388 if (p
->fixup_var_refs_queue
== 0)
390 else if (p
->fixup_var_refs_queue
->next
== 0)
391 fixup_var_refs (p
->fixup_var_refs_queue
->modified
,
392 p
->fixup_var_refs_queue
->promoted_mode
,
393 p
->fixup_var_refs_queue
->unsignedp
,
394 p
->fixup_var_refs_queue
->modified
, 0);
399 for (queue
= p
->fixup_var_refs_queue
; queue
; queue
= queue
->next
)
400 list
= gen_rtx_EXPR_LIST (VOIDmode
, queue
->modified
, list
);
402 for (queue
= p
->fixup_var_refs_queue
; queue
; queue
= queue
->next
)
403 fixup_var_refs (queue
->modified
, queue
->promoted_mode
,
404 queue
->unsignedp
, list
, 0);
408 p
->fixup_var_refs_queue
= 0;
410 /* Reset variables that have known state during rtx generation. */
411 rtx_equal_function_value_matters
= 1;
412 virtuals_instantiated
= 0;
413 generating_concat_p
= 1;
417 pop_function_context ()
419 pop_function_context_from (current_function_decl
);
422 /* Clear out all parts of the state in F that can safely be discarded
423 after the function has been parsed, but not compiled, to let
424 garbage collection reclaim the memory. */
427 free_after_parsing (f
)
430 /* f->expr->forced_labels is used by code generation. */
431 /* f->emit->regno_reg_rtx is used by code generation. */
432 /* f->varasm is used by code generation. */
433 /* f->eh->eh_return_stub_label is used by code generation. */
435 (*lang_hooks
.function
.final
) (f
);
439 /* Clear out all parts of the state in F that can safely be discarded
440 after the function has been compiled, to let garbage collection
441 reclaim the memory. */
444 free_after_compilation (f
)
453 f
->x_temp_slots
= NULL
;
454 f
->arg_offset_rtx
= NULL
;
455 f
->return_rtx
= NULL
;
456 f
->internal_arg_pointer
= NULL
;
457 f
->x_nonlocal_labels
= NULL
;
458 f
->x_nonlocal_goto_handler_slots
= NULL
;
459 f
->x_nonlocal_goto_handler_labels
= NULL
;
460 f
->x_nonlocal_goto_stack_level
= NULL
;
461 f
->x_cleanup_label
= NULL
;
462 f
->x_return_label
= NULL
;
463 f
->computed_goto_common_label
= NULL
;
464 f
->computed_goto_common_reg
= NULL
;
465 f
->x_save_expr_regs
= NULL
;
466 f
->x_stack_slot_list
= NULL
;
467 f
->x_rtl_expr_chain
= NULL
;
468 f
->x_tail_recursion_label
= NULL
;
469 f
->x_tail_recursion_reentry
= NULL
;
470 f
->x_arg_pointer_save_area
= NULL
;
471 f
->x_clobber_return_insn
= NULL
;
472 f
->x_context_display
= NULL
;
473 f
->x_trampoline_list
= NULL
;
474 f
->x_parm_birth_insn
= NULL
;
475 f
->x_last_parm_insn
= NULL
;
476 f
->x_parm_reg_stack_loc
= NULL
;
477 f
->fixup_var_refs_queue
= NULL
;
478 f
->original_arg_vector
= NULL
;
479 f
->original_decl_initial
= NULL
;
480 f
->inl_last_parm_insn
= NULL
;
481 f
->epilogue_delay_list
= NULL
;
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
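
/* Illustrative examples (not in the original sources) of the ALIGN
   argument described above:

       assign_stack_local_1 (SImode, 4, 0, cfun);     aligned per SImode
       assign_stack_local_1 (BLKmode, 32, -1, cfun);  BIGGEST_ALIGNMENT,
                                                      size rounded up to it
       assign_stack_local_1 (BLKmode, 12, 64, cfun);  64-bit alignment

   Most callers go through the assign_stack_local wrapper below, which
   simply passes the current function.  */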
524 assign_stack_local_1 (mode
, size
, align
, function
)
525 enum machine_mode mode
;
528 struct function
*function
;
531 int bigend_correction
= 0;
533 int frame_off
, frame_alignment
, frame_phase
;
540 alignment
= BIGGEST_ALIGNMENT
;
542 alignment
= GET_MODE_ALIGNMENT (mode
);
544 /* Allow the target to (possibly) increase the alignment of this
546 type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 0);
548 alignment
= LOCAL_ALIGNMENT (type
, alignment
);
550 alignment
/= BITS_PER_UNIT
;
552 else if (align
== -1)
554 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
555 size
= CEIL_ROUND (size
, alignment
);
558 alignment
= align
/ BITS_PER_UNIT
;
560 #ifdef FRAME_GROWS_DOWNWARD
561 function
->x_frame_offset
-= size
;
564 /* Ignore alignment we can't do with expected alignment of the boundary. */
565 if (alignment
* BITS_PER_UNIT
> PREFERRED_STACK_BOUNDARY
)
566 alignment
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
568 if (function
->stack_alignment_needed
< alignment
* BITS_PER_UNIT
)
569 function
->stack_alignment_needed
= alignment
* BITS_PER_UNIT
;
571 /* Calculate how many bytes the start of local variables is off from
573 frame_alignment
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
574 frame_off
= STARTING_FRAME_OFFSET
% frame_alignment
;
575 frame_phase
= frame_off
? frame_alignment
- frame_off
: 0;
577 /* Round the frame offset to the specified alignment. The default is
578 to always honor requests to align the stack but a port may choose to
579 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
580 if (STACK_ALIGNMENT_NEEDED
584 /* We must be careful here, since FRAME_OFFSET might be negative and
585 division with a negative dividend isn't as well defined as we might
586 like. So we instead assume that ALIGNMENT is a power of two and
587 use logical operations which are unambiguous. */
588 #ifdef FRAME_GROWS_DOWNWARD
589 function
->x_frame_offset
590 = (FLOOR_ROUND (function
->x_frame_offset
- frame_phase
, alignment
)
593 function
->x_frame_offset
594 = (CEIL_ROUND (function
->x_frame_offset
- frame_phase
, alignment
)
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
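
  /* Worked example (not in the original sources): on a big-endian target,
     asking for an 8-byte slot to hold a 2-byte HImode value gives
     bigend_correction = 8 - 2 = 6, so the returned address points at the
     last two bytes of the slot, where the value's significant bytes are
     stored.  */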
604 /* If we have already instantiated virtual registers, return the actual
605 address relative to the frame pointer. */
606 if (function
== cfun
&& virtuals_instantiated
)
607 addr
= plus_constant (frame_pointer_rtx
,
609 (frame_offset
+ bigend_correction
610 + STARTING_FRAME_OFFSET
, Pmode
));
612 addr
= plus_constant (virtual_stack_vars_rtx
,
614 (function
->x_frame_offset
+ bigend_correction
,
617 #ifndef FRAME_GROWS_DOWNWARD
618 function
->x_frame_offset
+= size
;
621 x
= gen_rtx_MEM (mode
, addr
);
623 function
->x_stack_slot_list
624 = gen_rtx_EXPR_LIST (VOIDmode
, x
, function
->x_stack_slot_list
);
/* Wrapper around assign_stack_local_1;  assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
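
/* Illustrative sketch (not in the original sources): expanding a call
   whose structure return value must live in memory might do

       rtx slot = assign_stack_temp_for_type (BLKmode,
                                              int_size_in_bytes (type),
                                              0, type);

   with KEEP == 0 so the slot is recycled at the next free_temp_slots,
   while a block-scope automatic variable would pass KEEP == 1.  */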
659 assign_stack_temp_for_type (mode
, size
, keep
, type
)
660 enum machine_mode mode
;
666 struct temp_slot
*p
, *best_p
= 0;
669 /* If SIZE is -1 it means that somebody tried to allocate a temporary
670 of a variable size. */
675 align
= BIGGEST_ALIGNMENT
;
677 align
= GET_MODE_ALIGNMENT (mode
);
680 type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 0);
683 align
= LOCAL_ALIGNMENT (type
, align
);
685 /* Try to find an available, already-allocated temporary of the proper
686 mode which meets the size and alignment requirements. Choose the
687 smallest one with the closest alignment. */
688 for (p
= temp_slots
; p
; p
= p
->next
)
689 if (p
->align
>= align
&& p
->size
>= size
&& GET_MODE (p
->slot
) == mode
691 && objects_must_conflict_p (p
->type
, type
)
692 && (best_p
== 0 || best_p
->size
> p
->size
693 || (best_p
->size
== p
->size
&& best_p
->align
> p
->align
)))
695 if (p
->align
== align
&& p
->size
== size
)
703 /* Make our best, if any, the one to use. */
706 /* If there are enough aligned bytes left over, make them into a new
707 temp_slot so that the extra bytes don't get wasted. Do this only
708 for BLKmode slots, so that we can be sure of the alignment. */
709 if (GET_MODE (best_p
->slot
) == BLKmode
)
711 int alignment
= best_p
->align
/ BITS_PER_UNIT
;
712 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
714 if (best_p
->size
- rounded_size
>= alignment
)
716 p
= (struct temp_slot
*) ggc_alloc (sizeof (struct temp_slot
));
717 p
->in_use
= p
->addr_taken
= 0;
718 p
->size
= best_p
->size
- rounded_size
;
719 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
720 p
->full_size
= best_p
->full_size
- rounded_size
;
721 p
->slot
= gen_rtx_MEM (BLKmode
,
722 plus_constant (XEXP (best_p
->slot
, 0),
724 p
->align
= best_p
->align
;
727 p
->type
= best_p
->type
;
728 p
->next
= temp_slots
;
731 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
734 best_p
->size
= rounded_size
;
735 best_p
->full_size
= rounded_size
;
742 /* If we still didn't find one, make a new temporary. */
745 HOST_WIDE_INT frame_offset_old
= frame_offset
;
747 p
= (struct temp_slot
*) ggc_alloc (sizeof (struct temp_slot
));
749 /* We are passing an explicit alignment request to assign_stack_local.
750 One side effect of that is assign_stack_local will not round SIZE
751 to ensure the frame offset remains suitably aligned.
753 So for requests which depended on the rounding of SIZE, we go ahead
754 and round it now. We also make sure ALIGNMENT is at least
755 BIGGEST_ALIGNMENT. */
756 if (mode
== BLKmode
&& align
< BIGGEST_ALIGNMENT
)
758 p
->slot
= assign_stack_local (mode
,
760 ? CEIL_ROUND (size
, (int) align
/ BITS_PER_UNIT
)
766 /* The following slot size computation is necessary because we don't
767 know the actual size of the temporary slot until assign_stack_local
768 has performed all the frame alignment and size rounding for the
769 requested temporary. Note that extra space added for alignment
770 can be either above or below this stack slot depending on which
771 way the frame grows. We include the extra space if and only if it
772 is above this slot. */
773 #ifdef FRAME_GROWS_DOWNWARD
774 p
->size
= frame_offset_old
- frame_offset
;
779 /* Now define the fields used by combine_temp_slots. */
780 #ifdef FRAME_GROWS_DOWNWARD
781 p
->base_offset
= frame_offset
;
782 p
->full_size
= frame_offset_old
- frame_offset
;
784 p
->base_offset
= frame_offset_old
;
785 p
->full_size
= frame_offset
- frame_offset_old
;
788 p
->next
= temp_slots
;
794 p
->rtl_expr
= seq_rtl_expr
;
799 p
->level
= target_temp_slot_level
;
804 p
->level
= var_temp_slot_level
;
809 p
->level
= temp_slot_level
;
814 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
815 slot
= gen_rtx_MEM (mode
, XEXP (p
->slot
, 0));
816 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, slot
, stack_slot_list
);
818 /* If we know the alias set for the memory that will be used, use
819 it. If there's no TYPE, then we don't know anything about the
820 alias set for the memory. */
821 set_mem_alias_set (slot
, type
? get_alias_set (type
) : 0);
822 set_mem_align (slot
, align
);
824 /* If a type is specified, set the relevant flags. */
827 RTX_UNCHANGING_P (slot
) = (lang_hooks
.honor_readonly
828 && TYPE_READONLY (type
));
829 MEM_VOLATILE_P (slot
) = TYPE_VOLATILE (type
);
830 MEM_SET_IN_STRUCT_P (slot
, AGGREGATE_TYPE_P (type
));
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate space of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */
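
/* Illustrative sketch (not in the original sources): expanding an
   expression whose aggregate value must be addressable might use

       rtx target = assign_temp (TREE_TYPE (exp), 1, 1, 1);

   whereas a scalar temporary that may as well stay in a register would
   pass MEMORY_REQUIRED == 0 and receive a pseudo from gen_reg_rtx.  */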
859 assign_temp (type_or_decl
, keep
, memory_required
, dont_promote
)
863 int dont_promote ATTRIBUTE_UNUSED
;
866 enum machine_mode mode
;
867 #ifndef PROMOTE_FOR_CALL_ONLY
871 if (DECL_P (type_or_decl
))
872 decl
= type_or_decl
, type
= TREE_TYPE (decl
);
874 decl
= NULL
, type
= type_or_decl
;
876 mode
= TYPE_MODE (type
);
877 #ifndef PROMOTE_FOR_CALL_ONLY
878 unsignedp
= TREE_UNSIGNED (type
);
881 if (mode
== BLKmode
|| memory_required
)
883 HOST_WIDE_INT size
= int_size_in_bytes (type
);
886 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
887 problems with allocating the stack space. */
891 /* Unfortunately, we don't yet know how to allocate variable-sized
892 temporaries. However, sometimes we have a fixed upper limit on
893 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
894 instead. This is the case for Chill variable-sized strings. */
895 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
896 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
897 && host_integerp (TYPE_ARRAY_MAX_SIZE (type
), 1))
898 size
= tree_low_cst (TYPE_ARRAY_MAX_SIZE (type
), 1);
900 /* The size of the temporary may be too large to fit into an integer. */
901 /* ??? Not sure this should happen except for user silliness, so limit
902 this to things that aren't compiler-generated temporaries. The
903 rest of the time we'll abort in assign_stack_temp_for_type. */
904 if (decl
&& size
== -1
905 && TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
)
907 error_with_decl (decl
, "size of variable `%s' is too large");
911 tmp
= assign_stack_temp_for_type (mode
, size
, keep
, type
);
915 #ifndef PROMOTE_FOR_CALL_ONLY
917 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
920 return gen_reg_rtx (mode
);
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
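
/* Worked example (not in the original sources): two free BLKmode slots
   with base_offset/full_size of 16/8 and 24/8 satisfy
   p->base_offset + p->full_size == q->base_offset, so they are merged
   into a single 16-byte slot at offset 16 that later, larger requests
   can reuse.  */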
930 combine_temp_slots ()
932 struct temp_slot
*p
, *q
;
933 struct temp_slot
*prev_p
, *prev_q
;
936 /* We can't combine slots, because the information about which slot
937 is in which alias set will be lost. */
938 if (flag_strict_aliasing
)
941 /* If there are a lot of temp slots, don't do anything unless
942 high levels of optimization. */
943 if (! flag_expensive_optimizations
)
944 for (p
= temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
945 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
948 for (p
= temp_slots
, prev_p
= 0; p
; p
= prev_p
? prev_p
->next
: temp_slots
)
952 if (! p
->in_use
&& GET_MODE (p
->slot
) == BLKmode
)
953 for (q
= p
->next
, prev_q
= p
; q
; q
= prev_q
->next
)
956 if (! q
->in_use
&& GET_MODE (q
->slot
) == BLKmode
)
958 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
960 /* Q comes after P; combine Q into P. */
962 p
->full_size
+= q
->full_size
;
965 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
967 /* P comes after Q; combine P into Q. */
969 q
->full_size
+= p
->full_size
;
974 /* Either delete Q or advance past it. */
976 prev_q
->next
= q
->next
;
980 /* Either delete P or advance past it. */
984 prev_p
->next
= p
->next
;
986 temp_slots
= p
->next
;
993 /* Find the temp slot corresponding to the object at address X. */
995 static struct temp_slot
*
996 find_temp_slot_from_address (x
)
1002 for (p
= temp_slots
; p
; p
= p
->next
)
1007 else if (XEXP (p
->slot
, 0) == x
1009 || (GET_CODE (x
) == PLUS
1010 && XEXP (x
, 0) == virtual_stack_vars_rtx
1011 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1012 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
1013 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
1016 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
1017 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
1018 if (XEXP (next
, 0) == x
)
1022 /* If we have a sum involving a register, see if it points to a temp
1024 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == REG
1025 && (p
= find_temp_slot_from_address (XEXP (x
, 0))) != 0)
1027 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
1028 && (p
= find_temp_slot_from_address (XEXP (x
, 1))) != 0)
1034 /* Indicate that NEW is an alternate way of referring to the temp slot
1035 that previously was known by OLD. */
1038 update_temp_slot_address (old
, new)
1041 struct temp_slot
*p
;
1043 if (rtx_equal_p (old
, new))
1046 p
= find_temp_slot_from_address (old
);
  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and there is a register in common between them,
     try a recursive call on those values.  */
1055 if (GET_CODE (old
) != PLUS
)
1058 if (GET_CODE (new) == REG
)
1060 update_temp_slot_address (XEXP (old
, 0), new);
1061 update_temp_slot_address (XEXP (old
, 1), new);
1064 else if (GET_CODE (new) != PLUS
)
1067 if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 0)))
1068 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 1));
1069 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 0)))
1070 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 1));
1071 else if (rtx_equal_p (XEXP (old
, 0), XEXP (new, 1)))
1072 update_temp_slot_address (XEXP (old
, 1), XEXP (new, 0));
1073 else if (rtx_equal_p (XEXP (old
, 1), XEXP (new, 1)))
1074 update_temp_slot_address (XEXP (old
, 0), XEXP (new, 0));
1079 /* Otherwise add an alias for the temp's address. */
1080 else if (p
->address
== 0)
1084 if (GET_CODE (p
->address
) != EXPR_LIST
)
1085 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
1087 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new, p
->address
);
1091 /* If X could be a reference to a temporary slot, mark the fact that its
1092 address was taken. */
1095 mark_temp_addr_taken (x
)
1098 struct temp_slot
*p
;
1103 /* If X is not in memory or is at a constant address, it cannot be in
1104 a temporary slot. */
1105 if (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1108 p
= find_temp_slot_from_address (XEXP (x
, 0));
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
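
/* Illustrative sketch (not in the original sources): when expanding a
   statement expression such as

       x = ({ struct S tmp = f (); tmp.field; });

   the value of the ({...}) may live in a temporary slot, so the caller
   invokes preserve_temp_slots (result) before popping the temporary
   nesting level, keeping that one slot alive at the enclosing level.  */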
1123 preserve_temp_slots (x
)
1126 struct temp_slot
*p
= 0;
1128 /* If there is no result, we still might have some objects whose address
1129 were taken, so we need to make sure they stay around. */
1132 for (p
= temp_slots
; p
; p
= p
->next
)
1133 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1139 /* If X is a register that is being used as a pointer, see if we have
1140 a temporary slot we know it points to. To be consistent with
1141 the code below, we really should preserve all non-kept slots
1142 if we can't find a match, but that seems to be much too costly. */
1143 if (GET_CODE (x
) == REG
&& REG_POINTER (x
))
1144 p
= find_temp_slot_from_address (x
);
1146 /* If X is not in memory or is at a constant address, it cannot be in
1147 a temporary slot, but it can contain something whose address was
1149 if (p
== 0 && (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0))))
1151 for (p
= temp_slots
; p
; p
= p
->next
)
1152 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1158 /* First see if we can find a match. */
1160 p
= find_temp_slot_from_address (XEXP (x
, 0));
1164 /* Move everything at our level whose address was taken to our new
1165 level in case we used its address. */
1166 struct temp_slot
*q
;
1168 if (p
->level
== temp_slot_level
)
1170 for (q
= temp_slots
; q
; q
= q
->next
)
1171 if (q
!= p
&& q
->addr_taken
&& q
->level
== p
->level
)
1180 /* Otherwise, preserve all non-kept slots at this level. */
1181 for (p
= temp_slots
; p
; p
= p
->next
)
1182 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */
1192 preserve_rtl_expr_result (x
)
1195 struct temp_slot
*p
;
1197 /* If X is not in memory or is at a constant address, it cannot be in
1198 a temporary slot. */
1199 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1202 /* If we can find a match, move it to our level unless it is already at
1204 p
= find_temp_slot_from_address (XEXP (x
, 0));
1207 p
->level
= MIN (p
->level
, temp_slot_level
);
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;
1226 for (p
= temp_slots
; p
; p
= p
->next
)
1227 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
1228 && p
->rtl_expr
== 0)
1231 combine_temp_slots ();
1234 /* Free all temporary slots used in T, an RTL_EXPR node. */
1237 free_temps_for_rtl_expr (t
)
1240 struct temp_slot
*p
;
1242 for (p
= temp_slots
; p
; p
= p
->next
)
1243 if (p
->rtl_expr
== t
)
1245 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1246 needs to be preserved. This can happen if a temporary in
1247 the RTL_EXPR was addressed; preserve_temp_slots will move
1248 the temporary into a higher level. */
1249 if (temp_slot_level
<= p
->level
)
1252 p
->rtl_expr
= NULL_TREE
;
1255 combine_temp_slots ();
1258 /* Mark all temporaries ever allocated in this function as not suitable
1259 for reuse until the current level is exited. */
1262 mark_all_temps_used ()
1264 struct temp_slot
*p
;
1266 for (p
= temp_slots
; p
; p
= p
->next
)
1268 p
->in_use
= p
->keep
= 1;
1269 p
->level
= MIN (p
->level
, temp_slot_level
);
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;
1289 for (p
= temp_slots
; p
; p
= p
->next
)
1290 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->rtl_expr
== 0)
1293 combine_temp_slots ();
1298 /* Initialize temporary slots. */
1303 /* We have not allocated any temporaries yet. */
1305 temp_slot_level
= 0;
1306 var_temp_slot_level
= 0;
1307 target_temp_slot_level
= 0;
/* Retroactively move an auto variable from a register to a stack
   slot.  This is done when an address-reference to the variable is
   seen.  If RESCAN is true, all previously emitted instructions are
   examined and modified to handle the fact that DECL is now
   addressable.  */
1317 put_var_into_stack (decl
, rescan
)
1322 enum machine_mode promoted_mode
, decl_mode
;
1323 struct function
*function
= 0;
1325 int can_use_addressof
;
1326 int volatilep
= TREE_CODE (decl
) != SAVE_EXPR
&& TREE_THIS_VOLATILE (decl
);
1327 int usedp
= (TREE_USED (decl
)
1328 || (TREE_CODE (decl
) != SAVE_EXPR
&& DECL_INITIAL (decl
) != 0));
1330 context
= decl_function_context (decl
);
1332 /* Get the current rtl used for this object and its original mode. */
1333 reg
= (TREE_CODE (decl
) == SAVE_EXPR
1334 ? SAVE_EXPR_RTL (decl
)
1335 : DECL_RTL_IF_SET (decl
));
1337 /* No need to do anything if decl has no rtx yet
1338 since in that case caller is setting TREE_ADDRESSABLE
1339 and a stack slot will be assigned when the rtl is made. */
1343 /* Get the declared mode for this object. */
1344 decl_mode
= (TREE_CODE (decl
) == SAVE_EXPR
? TYPE_MODE (TREE_TYPE (decl
))
1345 : DECL_MODE (decl
));
1346 /* Get the mode it's actually stored in. */
1347 promoted_mode
= GET_MODE (reg
);
1349 /* If this variable comes from an outer function, find that
1350 function's saved context. Don't use find_function_data here,
1351 because it might not be in any active function.
1352 FIXME: Is that really supposed to happen?
1353 It does in ObjC at least. */
1354 if (context
!= current_function_decl
&& context
!= inline_function_decl
)
1355 for (function
= outer_function_chain
; function
; function
= function
->outer
)
1356 if (function
->decl
== context
)
1359 /* If this is a variable-size object with a pseudo to address it,
1360 put that pseudo into the stack, if the var is nonlocal. */
1361 if (TREE_CODE (decl
) != SAVE_EXPR
&& DECL_NONLOCAL (decl
)
1362 && GET_CODE (reg
) == MEM
1363 && GET_CODE (XEXP (reg
, 0)) == REG
1364 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
1366 reg
= XEXP (reg
, 0);
1367 decl_mode
= promoted_mode
= GET_MODE (reg
);
1373 /* FIXME make it work for promoted modes too */
1374 && decl_mode
== promoted_mode
1375 #ifdef NON_SAVING_SETJMP
1376 && ! (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1380 /* If we can't use ADDRESSOF, make sure we see through one we already
1382 if (! can_use_addressof
&& GET_CODE (reg
) == MEM
1383 && GET_CODE (XEXP (reg
, 0)) == ADDRESSOF
)
1384 reg
= XEXP (XEXP (reg
, 0), 0);
1386 /* Now we should have a value that resides in one or more pseudo regs. */
1388 if (GET_CODE (reg
) == REG
)
1390 /* If this variable lives in the current function and we don't need
1391 to put things in the stack for the sake of setjmp, try to keep it
1392 in a register until we know we actually need the address. */
1393 if (can_use_addressof
)
1394 gen_mem_addressof (reg
, decl
, rescan
);
1396 put_reg_into_stack (function
, reg
, TREE_TYPE (decl
), promoted_mode
,
1397 decl_mode
, volatilep
, 0, usedp
, 0);
1399 else if (GET_CODE (reg
) == CONCAT
)
1401 /* A CONCAT contains two pseudos; put them both in the stack.
1402 We do it so they end up consecutive.
1403 We fixup references to the parts only after we fixup references
1404 to the whole CONCAT, lest we do double fixups for the latter
1406 enum machine_mode part_mode
= GET_MODE (XEXP (reg
, 0));
1407 tree part_type
= (*lang_hooks
.types
.type_for_mode
) (part_mode
, 0);
1408 rtx lopart
= XEXP (reg
, 0);
1409 rtx hipart
= XEXP (reg
, 1);
1410 #ifdef FRAME_GROWS_DOWNWARD
1411 /* Since part 0 should have a lower address, do it second. */
1412 put_reg_into_stack (function
, hipart
, part_type
, part_mode
,
1413 part_mode
, volatilep
, 0, 0, 0);
1414 put_reg_into_stack (function
, lopart
, part_type
, part_mode
,
1415 part_mode
, volatilep
, 0, 0, 0);
1417 put_reg_into_stack (function
, lopart
, part_type
, part_mode
,
1418 part_mode
, volatilep
, 0, 0, 0);
1419 put_reg_into_stack (function
, hipart
, part_type
, part_mode
,
1420 part_mode
, volatilep
, 0, 0, 0);
1423 /* Change the CONCAT into a combined MEM for both parts. */
1424 PUT_CODE (reg
, MEM
);
1425 MEM_ATTRS (reg
) = 0;
1427 /* set_mem_attributes uses DECL_RTL to avoid re-generating of
1428 already computed alias sets. Here we want to re-generate. */
1430 SET_DECL_RTL (decl
, NULL
);
1431 set_mem_attributes (reg
, decl
, 1);
1433 SET_DECL_RTL (decl
, reg
);
      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
1437 XEXP (reg
, 0) = XEXP (XEXP (reg
, 0), 0);
1438 /* Prevent sharing of rtl that might lose. */
1439 if (GET_CODE (XEXP (reg
, 0)) == PLUS
)
1440 XEXP (reg
, 0) = copy_rtx (XEXP (reg
, 0));
1441 if (usedp
&& rescan
)
1443 schedule_fixup_var_refs (function
, reg
, TREE_TYPE (decl
),
1445 schedule_fixup_var_refs (function
, lopart
, part_type
, part_mode
, 0);
1446 schedule_fixup_var_refs (function
, hipart
, part_type
, part_mode
, 0);
1453 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1454 into the stack frame of FUNCTION (0 means the current function).
1455 DECL_MODE is the machine mode of the user-level data type.
1456 PROMOTED_MODE is the machine mode of the register.
1457 VOLATILE_P is nonzero if this is for a "volatile" decl.
1458 USED_P is nonzero if this reg might have already been used in an insn. */
1461 put_reg_into_stack (function
, reg
, type
, promoted_mode
, decl_mode
, volatile_p
,
1462 original_regno
, used_p
, ht
)
1463 struct function
*function
;
1466 enum machine_mode promoted_mode
, decl_mode
;
1468 unsigned int original_regno
;
1472 struct function
*func
= function
? function
: cfun
;
1474 unsigned int regno
= original_regno
;
1477 regno
= REGNO (reg
);
1479 if (regno
< func
->x_max_parm_reg
)
1480 new = func
->x_parm_reg_stack_loc
[regno
];
1483 new = assign_stack_local_1 (decl_mode
, GET_MODE_SIZE (decl_mode
), 0, func
);
1485 PUT_CODE (reg
, MEM
);
1486 PUT_MODE (reg
, decl_mode
);
1487 XEXP (reg
, 0) = XEXP (new, 0);
1488 MEM_ATTRS (reg
) = 0;
1489 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1490 MEM_VOLATILE_P (reg
) = volatile_p
;
1492 /* If this is a memory ref that contains aggregate components,
1493 mark it as such for cse and loop optimize. If we are reusing a
1494 previously generated stack slot, then we need to copy the bit in
1495 case it was set for other reasons. For instance, it is set for
1496 __builtin_va_alist. */
1499 MEM_SET_IN_STRUCT_P (reg
,
1500 AGGREGATE_TYPE_P (type
) || MEM_IN_STRUCT_P (new));
1501 set_mem_alias_set (reg
, get_alias_set (type
));
1505 schedule_fixup_var_refs (function
, reg
, type
, promoted_mode
, ht
);
1508 /* Make sure that all refs to the variable, previously made
1509 when it was a register, are fixed up to be valid again.
1510 See function above for meaning of arguments. */
1513 schedule_fixup_var_refs (function
, reg
, type
, promoted_mode
, ht
)
1514 struct function
*function
;
1517 enum machine_mode promoted_mode
;
1520 int unsigned_p
= type
? TREE_UNSIGNED (type
) : 0;
1524 struct var_refs_queue
*temp
;
1527 = (struct var_refs_queue
*) ggc_alloc (sizeof (struct var_refs_queue
));
1528 temp
->modified
= reg
;
1529 temp
->promoted_mode
= promoted_mode
;
1530 temp
->unsignedp
= unsigned_p
;
1531 temp
->next
= function
->fixup_var_refs_queue
;
1532 function
->fixup_var_refs_queue
= temp
;
1535 /* Variable is local; fix it up now. */
1536 fixup_var_refs (reg
, promoted_mode
, unsigned_p
, reg
, ht
);
1540 fixup_var_refs (var
, promoted_mode
, unsignedp
, may_share
, ht
)
1542 enum machine_mode promoted_mode
;
1548 rtx first_insn
= get_insns ();
1549 struct sequence_stack
*stack
= seq_stack
;
1550 tree rtl_exps
= rtl_expr_chain
;
1552 /* If there's a hash table, it must record all uses of VAR. */
1557 fixup_var_refs_insns_with_hash (ht
, var
, promoted_mode
, unsignedp
,
1562 fixup_var_refs_insns (first_insn
, var
, promoted_mode
, unsignedp
,
1563 stack
== 0, may_share
);
1565 /* Scan all pending sequences too. */
1566 for (; stack
; stack
= stack
->next
)
1568 push_to_full_sequence (stack
->first
, stack
->last
);
1569 fixup_var_refs_insns (stack
->first
, var
, promoted_mode
, unsignedp
,
1570 stack
->next
!= 0, may_share
);
1571 /* Update remembered end of sequence
1572 in case we added an insn at the end. */
1573 stack
->last
= get_last_insn ();
1577 /* Scan all waiting RTL_EXPRs too. */
1578 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
1580 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
1581 if (seq
!= const0_rtx
&& seq
!= 0)
1583 push_to_sequence (seq
);
1584 fixup_var_refs_insns (seq
, var
, promoted_mode
, unsignedp
, 0,
1591 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1592 some part of an insn. Return a struct fixup_replacement whose OLD
1593 value is equal to X. Allocate a new structure if no such entry exists. */
1595 static struct fixup_replacement
*
1596 find_fixup_replacement (replacements
, x
)
1597 struct fixup_replacement
**replacements
;
1600 struct fixup_replacement
*p
;
1602 /* See if we have already replaced this. */
1603 for (p
= *replacements
; p
!= 0 && ! rtx_equal_p (p
->old
, x
); p
= p
->next
)
1608 p
= (struct fixup_replacement
*) xmalloc (sizeof (struct fixup_replacement
));
1611 p
->next
= *replacements
;
1618 /* Scan the insn-chain starting with INSN for refs to VAR and fix them
1619 up. TOPLEVEL is nonzero if this chain is the main chain of insns
1620 for the current function. MAY_SHARE is either a MEM that is not
1621 to be unshared or a list of them. */
1624 fixup_var_refs_insns (insn
, var
, promoted_mode
, unsignedp
, toplevel
, may_share
)
1627 enum machine_mode promoted_mode
;
1634 /* fixup_var_refs_insn might modify insn, so save its next
1636 rtx next
= NEXT_INSN (insn
);
1638 /* CALL_PLACEHOLDERs are special; we have to switch into each of
1639 the three sequences they (potentially) contain, and process
1640 them recursively. The CALL_INSN itself is not interesting. */
1642 if (GET_CODE (insn
) == CALL_INSN
1643 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
1647 /* Look at the Normal call, sibling call and tail recursion
1648 sequences attached to the CALL_PLACEHOLDER. */
1649 for (i
= 0; i
< 3; i
++)
1651 rtx seq
= XEXP (PATTERN (insn
), i
);
1654 push_to_sequence (seq
);
1655 fixup_var_refs_insns (seq
, var
, promoted_mode
, unsignedp
, 0,
1657 XEXP (PATTERN (insn
), i
) = get_insns ();
1663 else if (INSN_P (insn
))
1664 fixup_var_refs_insn (insn
, var
, promoted_mode
, unsignedp
, toplevel
,
1671 /* Look up the insns which reference VAR in HT and fix them up. Other
1672 arguments are the same as fixup_var_refs_insns.
1674 N.B. No need for special processing of CALL_PLACEHOLDERs here,
1675 because the hash table will point straight to the interesting insn
1676 (inside the CALL_PLACEHOLDER). */
1679 fixup_var_refs_insns_with_hash (ht
, var
, promoted_mode
, unsignedp
, may_share
)
1682 enum machine_mode promoted_mode
;
1686 struct insns_for_mem_entry tmp
;
1687 struct insns_for_mem_entry
*ime
;
1691 ime
= (struct insns_for_mem_entry
*) htab_find (ht
, &tmp
);
1692 for (insn_list
= ime
->insns
; insn_list
!= 0; insn_list
= XEXP (insn_list
, 1))
1693 if (INSN_P (XEXP (insn_list
, 0)))
1694 fixup_var_refs_insn (XEXP (insn_list
, 0), var
, promoted_mode
,
1695 unsignedp
, 1, may_share
);
/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  */
1706 fixup_var_refs_insn (insn
, var
, promoted_mode
, unsignedp
, toplevel
, no_share
)
1709 enum machine_mode promoted_mode
;
1715 rtx set
, prev
, prev_set
;
1718 /* Remember the notes in case we delete the insn. */
1719 note
= REG_NOTES (insn
);
1721 /* If this is a CLOBBER of VAR, delete it.
1723 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1724 and REG_RETVAL notes too. */
1725 if (GET_CODE (PATTERN (insn
)) == CLOBBER
1726 && (XEXP (PATTERN (insn
), 0) == var
1727 || (GET_CODE (XEXP (PATTERN (insn
), 0)) == CONCAT
1728 && (XEXP (XEXP (PATTERN (insn
), 0), 0) == var
1729 || XEXP (XEXP (PATTERN (insn
), 0), 1) == var
))))
1731 if ((note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)) != 0)
1732 /* The REG_LIBCALL note will go away since we are going to
1733 turn INSN into a NOTE, so just delete the
1734 corresponding REG_RETVAL note. */
1735 remove_note (XEXP (note
, 0),
1736 find_reg_note (XEXP (note
, 0), REG_RETVAL
,
1742 /* The insn to load VAR from a home in the arglist
1743 is now a no-op. When we see it, just delete it.
1744 Similarly if this is storing VAR from a register from which
1745 it was loaded in the previous insn. This will occur
1746 when an ADDRESSOF was made for an arglist slot. */
1748 && (set
= single_set (insn
)) != 0
1749 && SET_DEST (set
) == var
1750 /* If this represents the result of an insn group,
1751 don't delete the insn. */
1752 && find_reg_note (insn
, REG_RETVAL
, NULL_RTX
) == 0
1753 && (rtx_equal_p (SET_SRC (set
), var
)
1754 || (GET_CODE (SET_SRC (set
)) == REG
1755 && (prev
= prev_nonnote_insn (insn
)) != 0
1756 && (prev_set
= single_set (prev
)) != 0
1757 && SET_DEST (prev_set
) == SET_SRC (set
)
1758 && rtx_equal_p (SET_SRC (prev_set
), var
))))
1764 struct fixup_replacement
*replacements
= 0;
1765 rtx next_insn
= NEXT_INSN (insn
);
1767 if (SMALL_REGISTER_CLASSES
)
1769 /* If the insn that copies the results of a CALL_INSN
1770 into a pseudo now references VAR, we have to use an
1771 intermediate pseudo since we want the life of the
1772 return value register to be only a single insn.
1774 If we don't use an intermediate pseudo, such things as
1775 address computations to make the address of VAR valid
1776 if it is not can be placed between the CALL_INSN and INSN.
1778 To make sure this doesn't happen, we record the destination
1779 of the CALL_INSN and see if the next insn uses both that
1782 if (call_dest
!= 0 && GET_CODE (insn
) == INSN
1783 && reg_mentioned_p (var
, PATTERN (insn
))
1784 && reg_mentioned_p (call_dest
, PATTERN (insn
)))
1786 rtx temp
= gen_reg_rtx (GET_MODE (call_dest
));
1788 emit_insn_before (gen_move_insn (temp
, call_dest
), insn
);
1790 PATTERN (insn
) = replace_rtx (PATTERN (insn
),
1794 if (GET_CODE (insn
) == CALL_INSN
1795 && GET_CODE (PATTERN (insn
)) == SET
)
1796 call_dest
= SET_DEST (PATTERN (insn
));
1797 else if (GET_CODE (insn
) == CALL_INSN
1798 && GET_CODE (PATTERN (insn
)) == PARALLEL
1799 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1800 call_dest
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
1805 /* See if we have to do anything to INSN now that VAR is in
1806 memory. If it needs to be loaded into a pseudo, use a single
1807 pseudo for the entire insn in case there is a MATCH_DUP
1808 between two operands. We pass a pointer to the head of
1809 a list of struct fixup_replacements. If fixup_var_refs_1
1810 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1811 it will record them in this list.
1813 If it allocated a pseudo for any replacement, we copy into
1816 fixup_var_refs_1 (var
, promoted_mode
, &PATTERN (insn
), insn
,
1817 &replacements
, no_share
);
1819 /* If this is last_parm_insn, and any instructions were output
1820 after it to fix it up, then we must set last_parm_insn to
1821 the last such instruction emitted. */
1822 if (insn
== last_parm_insn
)
1823 last_parm_insn
= PREV_INSN (next_insn
);
1825 while (replacements
)
1827 struct fixup_replacement
*next
;
1829 if (GET_CODE (replacements
->new) == REG
)
1834 /* OLD might be a (subreg (mem)). */
1835 if (GET_CODE (replacements
->old
) == SUBREG
)
1837 = fixup_memory_subreg (replacements
->old
, insn
,
1841 = fixup_stack_1 (replacements
->old
, insn
);
1843 insert_before
= insn
;
1845 /* If we are changing the mode, do a conversion.
1846 This might be wasteful, but combine.c will
1847 eliminate much of the waste. */
1849 if (GET_MODE (replacements
->new)
1850 != GET_MODE (replacements
->old
))
1853 convert_move (replacements
->new,
1854 replacements
->old
, unsignedp
);
1859 seq
= gen_move_insn (replacements
->new,
1862 emit_insn_before (seq
, insert_before
);
1865 next
= replacements
->next
;
1866 free (replacements
);
1867 replacements
= next
;
1871 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1872 But don't touch other insns referred to by reg-notes;
1873 we will get them elsewhere. */
1876 if (GET_CODE (note
) != INSN_LIST
)
1878 = walk_fixup_memory_subreg (XEXP (note
, 0), insn
,
1880 note
= XEXP (note
, 1);
1884 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1885 See if the rtx expression at *LOC in INSN needs to be changed.
1887 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1888 contain a list of original rtx's and replacements. If we find that we need
1889 to modify this insn by replacing a memory reference with a pseudo or by
1890 making a new MEM to implement a SUBREG, we consult that list to see if
1891 we have already chosen a replacement. If none has already been allocated,
1892 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1893 or the SUBREG, as appropriate, to the pseudo. */
1896 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
, no_share
)
1898 enum machine_mode promoted_mode
;
1901 struct fixup_replacement
**replacements
;
1906 RTX_CODE code
= GET_CODE (x
);
1909 struct fixup_replacement
*replacement
;
1914 if (XEXP (x
, 0) == var
)
1916 /* Prevent sharing of rtl that might lose. */
1917 rtx sub
= copy_rtx (XEXP (var
, 0));
1919 if (! validate_change (insn
, loc
, sub
, 0))
1921 rtx y
= gen_reg_rtx (GET_MODE (sub
));
1924 /* We should be able to replace with a register or all is lost.
1925 Note that we can't use validate_change to verify this, since
1926 we're not caring for replacing all dups simultaneously. */
1927 if (! validate_replace_rtx (*loc
, y
, insn
))
1930 /* Careful! First try to recognize a direct move of the
1931 value, mimicking how things are done in gen_reload wrt
1932 PLUS. Consider what happens when insn is a conditional
1933 move instruction and addsi3 clobbers flags. */
1936 new_insn
= emit_insn (gen_rtx_SET (VOIDmode
, y
, sub
));
1940 if (recog_memoized (new_insn
) < 0)
1942 /* That failed. Fall back on force_operand and hope. */
1945 sub
= force_operand (sub
, y
);
1947 emit_insn (gen_move_insn (y
, sub
));
1953 /* Don't separate setter from user. */
1954 if (PREV_INSN (insn
) && sets_cc0_p (PREV_INSN (insn
)))
1955 insn
= PREV_INSN (insn
);
1958 emit_insn_before (seq
, insn
);
1966 /* If we already have a replacement, use it. Otherwise,
1967 try to fix up this address in case it is invalid. */
1969 replacement
= find_fixup_replacement (replacements
, var
);
1970 if (replacement
->new)
1972 *loc
= replacement
->new;
1976 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1978 /* Unless we are forcing memory to register or we changed the mode,
1979 we can leave things the way they are if the insn is valid. */
1981 INSN_CODE (insn
) = -1;
1982 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1983 && recog_memoized (insn
) >= 0)
1986 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1990 /* If X contains VAR, we need to unshare it here so that we update
1991 each occurrence separately. But all identical MEMs in one insn
1992 must be replaced with the same rtx because of the possibility of
1995 if (reg_mentioned_p (var
, x
))
1997 replacement
= find_fixup_replacement (replacements
, x
);
1998 if (replacement
->new == 0)
1999 replacement
->new = copy_most_rtx (x
, no_share
);
2001 *loc
= x
= replacement
->new;
2002 code
= GET_CODE (x
);
2019 /* Note that in some cases those types of expressions are altered
2020 by optimize_bit_field, and do not survive to get here. */
2021 if (XEXP (x
, 0) == var
2022 || (GET_CODE (XEXP (x
, 0)) == SUBREG
2023 && SUBREG_REG (XEXP (x
, 0)) == var
))
	/* Get TEM as a valid MEM in the mode presently in the insn.

	   We don't worry about the possibility of MATCH_DUP here; it
	   is highly unlikely and would be tricky to handle.  */
2031 if (GET_CODE (tem
) == SUBREG
)
2033 if (GET_MODE_BITSIZE (GET_MODE (tem
))
2034 > GET_MODE_BITSIZE (GET_MODE (var
)))
2036 replacement
= find_fixup_replacement (replacements
, var
);
2037 if (replacement
->new == 0)
2038 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2039 SUBREG_REG (tem
) = replacement
->new;
2041 /* The following code works only if we have a MEM, so we
2042 need to handle the subreg here. We directly substitute
2043 it assuming that a subreg must be OK here. We already
2044 scheduled a replacement to copy the mem into the
2050 tem
= fixup_memory_subreg (tem
, insn
, promoted_mode
, 0);
2053 tem
= fixup_stack_1 (tem
, insn
);
	/* Unless we want to load from memory, get TEM into the proper mode
	   for an extract from memory.  This can only be done if the
	   extract is at a constant position and length.  */

	if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	    && GET_CODE (XEXP (x, 2)) == CONST_INT
	    && ! mode_dependent_address_p (XEXP (tem, 0))
	    && ! MEM_VOLATILE_P (tem))
2064 enum machine_mode wanted_mode
= VOIDmode
;
2065 enum machine_mode is_mode
= GET_MODE (tem
);
2066 HOST_WIDE_INT pos
= INTVAL (XEXP (x
, 2));
2068 if (GET_CODE (x
) == ZERO_EXTRACT
)
2070 enum machine_mode new_mode
2071 = mode_for_extraction (EP_extzv
, 1);
2072 if (new_mode
!= MAX_MACHINE_MODE
)
2073 wanted_mode
= new_mode
;
2075 else if (GET_CODE (x
) == SIGN_EXTRACT
)
2077 enum machine_mode new_mode
2078 = mode_for_extraction (EP_extv
, 1);
2079 if (new_mode
!= MAX_MACHINE_MODE
)
2080 wanted_mode
= new_mode
;
2083 /* If we have a narrower mode, we can do something. */
2084 if (wanted_mode
!= VOIDmode
2085 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
2087 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
2088 rtx old_pos
= XEXP (x
, 2);
	    /* If the bytes and bits are counted differently, we
	       must adjust the offset.  */
	    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
	      offset = (GET_MODE_SIZE (is_mode)
			- GET_MODE_SIZE (wanted_mode) - offset);

	    pos %= GET_MODE_BITSIZE (wanted_mode);
2099 newmem
= adjust_address_nv (tem
, wanted_mode
, offset
);
2101 /* Make the change and see if the insn remains valid. */
2102 INSN_CODE (insn
) = -1;
2103 XEXP (x
, 0) = newmem
;
2104 XEXP (x
, 2) = GEN_INT (pos
);
2106 if (recog_memoized (insn
) >= 0)
2109 /* Otherwise, restore old position. XEXP (x, 0) will be
2111 XEXP (x
, 2) = old_pos
;
	/* If we get here, the bitfield extract insn can't accept a memory
	   reference.  Copy the input into a register.  */

	tem1 = gen_reg_rtx (GET_MODE (tem));
	emit_insn_before (gen_move_insn (tem1, tem), insn);
2126 if (SUBREG_REG (x
) == var
)
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */
2133 if (SUBREG_PROMOTED_VAR_P (x
))
2136 fixup_var_refs_1 (var
, GET_MODE (var
), loc
, insn
, replacements
,
	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
2145 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
2147 replacement
= find_fixup_replacement (replacements
, var
);
2148 if (replacement
->new == 0)
2149 replacement
->new = gen_reg_rtx (promoted_mode
);
2150 SUBREG_REG (x
) = replacement
->new;
	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
2158 replacement
= find_fixup_replacement (replacements
, x
);
2159 if (replacement
->new)
2161 *loc
= replacement
->new;
2165 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
,
2168 INSN_CODE (insn
) = -1;
2169 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
2172 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
2178 /* First do special simplification of bit-field references. */
2179 if (GET_CODE (SET_DEST (x
)) == SIGN_EXTRACT
2180 || GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2181 optimize_bit_field (x
, insn
, 0);
2182 if (GET_CODE (SET_SRC (x
)) == SIGN_EXTRACT
2183 || GET_CODE (SET_SRC (x
)) == ZERO_EXTRACT
)
2184 optimize_bit_field (x
, insn
, 0);
2186 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2187 into a register and then store it back out. */
2188 if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
2189 && GET_CODE (XEXP (SET_DEST (x
), 0)) == SUBREG
2190 && SUBREG_REG (XEXP (SET_DEST (x
), 0)) == var
2191 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x
), 0)))
2192 > GET_MODE_SIZE (GET_MODE (var
))))
2194 replacement
= find_fixup_replacement (replacements
, var
);
2195 if (replacement
->new == 0)
2196 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2198 SUBREG_REG (XEXP (SET_DEST (x
), 0)) = replacement
->new;
2199 emit_insn_after (gen_move_insn (var
, replacement
->new), insn
);
2202 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2203 insn into a pseudo and store the low part of the pseudo into VAR. */
2204 if (GET_CODE (SET_DEST (x
)) == SUBREG
2205 && SUBREG_REG (SET_DEST (x
)) == var
2206 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
2207 > GET_MODE_SIZE (GET_MODE (var
))))
2209 SET_DEST (x
) = tem
= gen_reg_rtx (GET_MODE (SET_DEST (x
)));
2210 emit_insn_after (gen_move_insn (var
, gen_lowpart (GET_MODE (var
),
2217 rtx dest
= SET_DEST (x
);
2218 rtx src
= SET_SRC (x
);
2219 rtx outerdest
= dest
;
2221 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
2222 || GET_CODE (dest
) == SIGN_EXTRACT
2223 || GET_CODE (dest
) == ZERO_EXTRACT
)
2224 dest
= XEXP (dest
, 0);
2226 if (GET_CODE (src
) == SUBREG
)
2227 src
= SUBREG_REG (src
);
2229 /* If VAR does not appear at the top level of the SET
2230 just scan the lower levels of the tree. */
2232 if (src
!= var
&& dest
!= var
)
2235 /* We will need to rerecognize this insn. */
2236 INSN_CODE (insn
) = -1;
2238 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
2239 && mode_for_extraction (EP_insv
, -1) != MAX_MACHINE_MODE
)
2241 /* Since this case will return, ensure we fixup all the
2243 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 1),
2244 insn
, replacements
, no_share
);
2245 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 2),
2246 insn
, replacements
, no_share
);
2247 fixup_var_refs_1 (var
, promoted_mode
, &SET_SRC (x
),
2248 insn
, replacements
, no_share
);
2250 tem
= XEXP (outerdest
, 0);
2252 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2253 that may appear inside a ZERO_EXTRACT.
2254 This was legitimate when the MEM was a REG. */
2255 if (GET_CODE (tem
) == SUBREG
2256 && SUBREG_REG (tem
) == var
)
2257 tem
= fixup_memory_subreg (tem
, insn
, promoted_mode
, 0);
2259 tem
= fixup_stack_1 (tem
, insn
);
2261 if (GET_CODE (XEXP (outerdest
, 1)) == CONST_INT
2262 && GET_CODE (XEXP (outerdest
, 2)) == CONST_INT
2263 && ! mode_dependent_address_p (XEXP (tem
, 0))
2264 && ! MEM_VOLATILE_P (tem
))
2266 enum machine_mode wanted_mode
;
2267 enum machine_mode is_mode
= GET_MODE (tem
);
2268 HOST_WIDE_INT pos
= INTVAL (XEXP (outerdest
, 2));
2270 wanted_mode
= mode_for_extraction (EP_insv
, 0);
2272 /* If we have a narrower mode, we can do something. */
2273 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
2275 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
2276 rtx old_pos
= XEXP (outerdest
, 2);
2279 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
2280 offset
= (GET_MODE_SIZE (is_mode
)
2281 - GET_MODE_SIZE (wanted_mode
) - offset
);
2283 pos
%= GET_MODE_BITSIZE (wanted_mode
);
2285 newmem
= adjust_address_nv (tem
, wanted_mode
, offset
);
2287 /* Make the change and see if the insn remains valid. */
2288 INSN_CODE (insn
) = -1;
2289 XEXP (outerdest
, 0) = newmem
;
2290 XEXP (outerdest
, 2) = GEN_INT (pos
);
2292 if (recog_memoized (insn
) >= 0)
2295 /* Otherwise, restore old position. XEXP (x, 0) will be
2297 XEXP (outerdest
, 2) = old_pos
;
2301 /* If we get here, the bit-field store doesn't allow memory
2302 or isn't located at a constant position. Load the value into
2303 a register, do the store, and put it back into memory. */
2305 tem1
= gen_reg_rtx (GET_MODE (tem
));
2306 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2307 emit_insn_after (gen_move_insn (tem
, tem1
), insn
);
2308 XEXP (outerdest
, 0) = tem1
;
2312 /* STRICT_LOW_PART is a no-op on memory references
2313 and it can cause combinations to be unrecognizable,
2316 if (dest
== var
&& GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2317 SET_DEST (x
) = XEXP (SET_DEST (x
), 0);
      /* A valid insn to copy VAR into or out of a register
	 must be left alone, to avoid an infinite loop here.
	 If the reference to VAR is by a subreg, fix that up,
	 since SUBREG is not valid for a memref.
	 Also fix up the address of the stack slot.

	 Note that we must not try to recognize the insn until
	 after we know that we have valid addresses and no
	 (subreg (mem ...) ...) constructs, since these interfere
	 with determining the validity of the insn.  */
2330 if ((SET_SRC (x
) == var
2331 || (GET_CODE (SET_SRC (x
)) == SUBREG
2332 && SUBREG_REG (SET_SRC (x
)) == var
))
2333 && (GET_CODE (SET_DEST (x
)) == REG
2334 || (GET_CODE (SET_DEST (x
)) == SUBREG
2335 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
))
2336 && GET_MODE (var
) == promoted_mode
2337 && x
== single_set (insn
))
2341 if (GET_CODE (SET_SRC (x
)) == SUBREG
2342 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x
)))
2343 > GET_MODE_SIZE (GET_MODE (var
))))
2345 /* This (subreg VAR) is now a paradoxical subreg. We need
2346 to replace VAR instead of the subreg. */
2347 replacement
= find_fixup_replacement (replacements
, var
);
2348 if (replacement
->new == NULL_RTX
)
2349 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2350 SUBREG_REG (SET_SRC (x
)) = replacement
->new;
2354 replacement
= find_fixup_replacement (replacements
, SET_SRC (x
));
2355 if (replacement
->new)
2356 SET_SRC (x
) = replacement
->new;
2357 else if (GET_CODE (SET_SRC (x
)) == SUBREG
)
2358 SET_SRC (x
) = replacement
->new
2359 = fixup_memory_subreg (SET_SRC (x
), insn
, promoted_mode
,
2362 SET_SRC (x
) = replacement
->new
2363 = fixup_stack_1 (SET_SRC (x
), insn
);
2366 if (recog_memoized (insn
) >= 0)
	  /* INSN is not valid, but we know that we want to
	     copy SET_SRC (x) to SET_DEST (x) in some way.  So
	     we generate the move and see whether it requires more
	     than one insn.  If it does, we emit those insns and
	     delete INSN.  Otherwise, we can just replace the pattern
	     of INSN; we have already verified above that INSN has
	     no other function than to do X.  */
2377 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2378 if (NEXT_INSN (pat
) != NULL_RTX
)
2380 last
= emit_insn_before (pat
, insn
);
2382 /* INSN might have REG_RETVAL or other important notes, so
2383 we need to store the pattern of the last insn in the
2384 sequence into INSN similarly to the normal case. LAST
2385 should not have REG_NOTES, but we allow them if INSN has
2387 if (REG_NOTES (last
) && REG_NOTES (insn
))
2389 if (REG_NOTES (last
))
2390 REG_NOTES (insn
) = REG_NOTES (last
);
2391 PATTERN (insn
) = PATTERN (last
);
2396 PATTERN (insn
) = PATTERN (pat
);
2401 if ((SET_DEST (x
) == var
2402 || (GET_CODE (SET_DEST (x
)) == SUBREG
2403 && SUBREG_REG (SET_DEST (x
)) == var
))
2404 && (GET_CODE (SET_SRC (x
)) == REG
2405 || (GET_CODE (SET_SRC (x
)) == SUBREG
2406 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == REG
))
2407 && GET_MODE (var
) == promoted_mode
2408 && x
== single_set (insn
))
2412 if (GET_CODE (SET_DEST (x
)) == SUBREG
)
2413 SET_DEST (x
) = fixup_memory_subreg (SET_DEST (x
), insn
,
2416 SET_DEST (x
) = fixup_stack_1 (SET_DEST (x
), insn
);
2418 if (recog_memoized (insn
) >= 0)
2421 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2422 if (NEXT_INSN (pat
) != NULL_RTX
)
2424 last
= emit_insn_before (pat
, insn
);
2426 /* INSN might have REG_RETVAL or other important notes, so
2427 we need to store the pattern of the last insn in the
2428 sequence into INSN similarly to the normal case. LAST
2429 should not have REG_NOTES, but we allow them if INSN has
2431 if (REG_NOTES (last
) && REG_NOTES (insn
))
2433 if (REG_NOTES (last
))
2434 REG_NOTES (insn
) = REG_NOTES (last
);
2435 PATTERN (insn
) = PATTERN (last
);
2440 PATTERN (insn
) = PATTERN (pat
);
      /* Otherwise, storing into VAR must be handled specially
	 by storing into a temporary and copying that into VAR
	 with a new insn after this one.  Note that this case
	 will be used when storing into a promoted scalar since
	 the insn will now have different modes on the input
	 and output and hence will be invalid (except for the case
	 of setting it to a constant, which does not need any
	 change if it is valid).  We generate extra code in that case,
	 but combine.c will eliminate it.  */
2458 rtx fixeddest
= SET_DEST (x
);
2459 enum machine_mode temp_mode
;
2461 /* STRICT_LOW_PART can be discarded, around a MEM. */
2462 if (GET_CODE (fixeddest
) == STRICT_LOW_PART
)
2463 fixeddest
= XEXP (fixeddest
, 0);
2464 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2465 if (GET_CODE (fixeddest
) == SUBREG
)
2467 fixeddest
= fixup_memory_subreg (fixeddest
, insn
,
2469 temp_mode
= GET_MODE (fixeddest
);
2473 fixeddest
= fixup_stack_1 (fixeddest
, insn
);
2474 temp_mode
= promoted_mode
;
2477 temp
= gen_reg_rtx (temp_mode
);
2479 emit_insn_after (gen_move_insn (fixeddest
,
2480 gen_lowpart (GET_MODE (fixeddest
),
2484 SET_DEST (x
) = temp
;
2492 /* Nothing special about this RTX; fix its operands. */
2494 fmt
= GET_RTX_FORMAT (code
);
2495 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2498 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (x
, i
), insn
, replacements
,
2500 else if (fmt
[i
] == 'E')
2503 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2504 fixup_var_refs_1 (var
, promoted_mode
, &XVECEXP (x
, i
, j
),
2505 insn
, replacements
, no_share
);
/* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
   The REG was placed on the stack, so X now has the form (SUBREG:m1
   (MEM:m2 ...) ...).

   Return an rtx (MEM:m1 newaddr) which is equivalent.  If any insns
   must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside REG_NOTES.  */
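
/* For example (a sketch; the modes and byte offset are illustrative and
   assume a little-endian layout): after VAR moves to the stack,

       (subreg:QI (mem:SI (plus:SI (reg fp) (const_int -8))) 3)

   is rewritten by this function into the equivalent narrower reference

       (mem:QI (plus:SI (reg fp) (const_int -5)))

   with any address-computation insns emitted before INSN.  */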
2521 fixup_memory_subreg (x
, insn
, promoted_mode
, uncritical
)
2524 enum machine_mode promoted_mode
;
2528 rtx mem
= SUBREG_REG (x
);
2529 rtx addr
= XEXP (mem
, 0);
2530 enum machine_mode mode
= GET_MODE (x
);
2533 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2534 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (mem
)) && ! uncritical
)
2537 offset
= SUBREG_BYTE (x
);
2538 if (BYTES_BIG_ENDIAN
)
2539 /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2540 the offset so that it points to the right location within the
2542 offset
-= (GET_MODE_SIZE (promoted_mode
) - GET_MODE_SIZE (GET_MODE (mem
)));
2544 if (!flag_force_addr
2545 && memory_address_p (mode
, plus_constant (addr
, offset
)))
2546 /* Shortcut if no insns need be emitted. */
2547 return adjust_address (mem
, mode
, offset
);
2550 result
= adjust_address (mem
, mode
, offset
);
2554 emit_insn_before (seq
, insn
);
/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   INSN, PROMOTED_MODE and UNCRITICAL are as for
   fixup_memory_subreg.  */
2567 walk_fixup_memory_subreg (x
, insn
, promoted_mode
, uncritical
)
2570 enum machine_mode promoted_mode
;
2580 code
= GET_CODE (x
);
2582 if (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
2583 return fixup_memory_subreg (x
, insn
, promoted_mode
, uncritical
);
2585 /* Nothing special about this RTX; fix its operands. */
2587 fmt
= GET_RTX_FORMAT (code
);
2588 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2591 XEXP (x
, i
) = walk_fixup_memory_subreg (XEXP (x
, i
), insn
,
2592 promoted_mode
, uncritical
);
2593 else if (fmt
[i
] == 'E')
2596 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2598 = walk_fixup_memory_subreg (XVECEXP (x
, i
, j
), insn
,
2599 promoted_mode
, uncritical
);
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */
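
/* A sketch of the effect (register number and displacement are
   illustrative only): if the target cannot address

       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 40000)))

   directly, the sum is computed into a fresh pseudo before INSN,

       (set (reg:SI 117) (plus:SI (reg:SI virtual-stack-vars)
				  (const_int 40000)))

   and the reference becomes a fresh (mem:SI (reg:SI 117)).  */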
2612 fixup_stack_1 (x
, insn
)
2617 RTX_CODE code
= GET_CODE (x
);
2622 rtx ad
= XEXP (x
, 0);
2623 /* If we have address of a stack slot but it's not valid
2624 (displacement is too large), compute the sum in a register. */
2625 if (GET_CODE (ad
) == PLUS
2626 && GET_CODE (XEXP (ad
, 0)) == REG
2627 && ((REGNO (XEXP (ad
, 0)) >= FIRST_VIRTUAL_REGISTER
2628 && REGNO (XEXP (ad
, 0)) <= LAST_VIRTUAL_REGISTER
)
2629 || REGNO (XEXP (ad
, 0)) == FRAME_POINTER_REGNUM
2630 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2631 || REGNO (XEXP (ad
, 0)) == HARD_FRAME_POINTER_REGNUM
2633 || REGNO (XEXP (ad
, 0)) == STACK_POINTER_REGNUM
2634 || REGNO (XEXP (ad
, 0)) == ARG_POINTER_REGNUM
2635 || XEXP (ad
, 0) == current_function_internal_arg_pointer
)
2636 && GET_CODE (XEXP (ad
, 1)) == CONST_INT
)
2639 if (memory_address_p (GET_MODE (x
), ad
))
2643 temp
= copy_to_reg (ad
);
2646 emit_insn_before (seq
, insn
);
2647 return replace_equiv_address (x
, temp
);
2652 fmt
= GET_RTX_FORMAT (code
);
2653 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2656 XEXP (x
, i
) = fixup_stack_1 (XEXP (x
, i
), insn
);
2657 else if (fmt
[i
] == 'E')
2660 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2661 XVECEXP (x
, i
, j
) = fixup_stack_1 (XVECEXP (x
, i
, j
), insn
);
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */
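
/* A sketch of the transformation (modes, offsets and register numbers are
   illustrative): a byte-sized, byte-aligned field store such as

       (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
	    (reg:SI 60))

   can be rewritten as a plain byte move of the low part of the source,

       (set (mem:QI (plus ADDR (const_int 1))) (subreg:QI (reg:SI 60) 0))

   where the byte offset 1 assumes bits are counted from the low-address
   end; on other byte/bit orderings the offset is adjusted as in the code
   below.  */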
2679 optimize_bit_field (body
, insn
, equiv_mem
)
2687 enum machine_mode mode
;
2689 if (GET_CODE (SET_DEST (body
)) == SIGN_EXTRACT
2690 || GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
)
2691 bitfield
= SET_DEST (body
), destflag
= 1;
2693 bitfield
= SET_SRC (body
), destflag
= 0;
2695 /* First check that the field being stored has constant size and position
2696 and is in fact a byte or halfword suitably aligned. */
2698 if (GET_CODE (XEXP (bitfield
, 1)) == CONST_INT
2699 && GET_CODE (XEXP (bitfield
, 2)) == CONST_INT
2700 && ((mode
= mode_for_size (INTVAL (XEXP (bitfield
, 1)), MODE_INT
, 1))
2702 && INTVAL (XEXP (bitfield
, 2)) % INTVAL (XEXP (bitfield
, 1)) == 0)
2706 /* Now check that the containing word is memory, not a register,
2707 and that it is safe to change the machine mode. */
2709 if (GET_CODE (XEXP (bitfield
, 0)) == MEM
)
2710 memref
= XEXP (bitfield
, 0);
2711 else if (GET_CODE (XEXP (bitfield
, 0)) == REG
2713 memref
= equiv_mem
[REGNO (XEXP (bitfield
, 0))];
2714 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2715 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == MEM
)
2716 memref
= SUBREG_REG (XEXP (bitfield
, 0));
2717 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2719 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == REG
)
2720 memref
= equiv_mem
[REGNO (SUBREG_REG (XEXP (bitfield
, 0)))];
2723 && ! mode_dependent_address_p (XEXP (memref
, 0))
2724 && ! MEM_VOLATILE_P (memref
))
2726 /* Now adjust the address, first for any subreg'ing
2727 that we are now getting rid of,
2728 and then for which byte of the word is wanted. */
2730 HOST_WIDE_INT offset
= INTVAL (XEXP (bitfield
, 2));
2733 /* Adjust OFFSET to count bits from low-address byte. */
2734 if (BITS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
2735 offset
= (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield
, 0)))
2736 - offset
- INTVAL (XEXP (bitfield
, 1)));
2738 /* Adjust OFFSET to count bytes from low-address byte. */
2739 offset
/= BITS_PER_UNIT
;
2740 if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
)
2742 offset
+= (SUBREG_BYTE (XEXP (bitfield
, 0))
2743 / UNITS_PER_WORD
) * UNITS_PER_WORD
;
2744 if (BYTES_BIG_ENDIAN
)
2745 offset
-= (MIN (UNITS_PER_WORD
,
2746 GET_MODE_SIZE (GET_MODE (XEXP (bitfield
, 0))))
2747 - MIN (UNITS_PER_WORD
,
2748 GET_MODE_SIZE (GET_MODE (memref
))));
2752 memref
= adjust_address (memref
, mode
, offset
);
2753 insns
= get_insns ();
2755 emit_insn_before (insns
, insn
);
2757 /* Store this memory reference where
2758 we found the bit field reference. */
2762 validate_change (insn
, &SET_DEST (body
), memref
, 1);
2763 if (! CONSTANT_ADDRESS_P (SET_SRC (body
)))
2765 rtx src
= SET_SRC (body
);
2766 while (GET_CODE (src
) == SUBREG
2767 && SUBREG_BYTE (src
) == 0)
2768 src
= SUBREG_REG (src
);
2769 if (GET_MODE (src
) != GET_MODE (memref
))
2770 src
= gen_lowpart (GET_MODE (memref
), SET_SRC (body
));
2771 validate_change (insn
, &SET_SRC (body
), src
, 1);
2773 else if (GET_MODE (SET_SRC (body
)) != VOIDmode
2774 && GET_MODE (SET_SRC (body
)) != GET_MODE (memref
))
2775 /* This shouldn't happen because anything that didn't have
2776 one of these modes should have got converted explicitly
2777 and then referenced through a subreg.
2778 This is so because the original bit-field was
2779 handled by agg_mode and so its tree structure had
2780 the same mode that memref now has. */
2785 rtx dest
= SET_DEST (body
);
2787 while (GET_CODE (dest
) == SUBREG
2788 && SUBREG_BYTE (dest
) == 0
2789 && (GET_MODE_CLASS (GET_MODE (dest
))
2790 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest
))))
2791 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
2793 dest
= SUBREG_REG (dest
);
2795 validate_change (insn
, &SET_DEST (body
), dest
, 1);
2797 if (GET_MODE (dest
) == GET_MODE (memref
))
2798 validate_change (insn
, &SET_SRC (body
), memref
, 1);
2801 /* Convert the mem ref to the destination mode. */
2802 rtx newreg
= gen_reg_rtx (GET_MODE (dest
));
2805 convert_move (newreg
, memref
,
2806 GET_CODE (SET_SRC (body
)) == ZERO_EXTRACT
);
2810 validate_change (insn
, &SET_SRC (body
), newreg
, 1);
2814 /* See if we can convert this extraction or insertion into
2815 a simple move insn. We might not be able to do so if this
2816 was, for example, part of a PARALLEL.
2818 If we succeed, write out any needed conversions. If we fail,
2819 it is hard to guess why we failed, so don't do anything
2820 special; just let the optimization be suppressed. */
2822 if (apply_change_group () && seq
)
2823 emit_insn_before (seq
, insn
);
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
 + (STACK_POINTER_OFFSET))
#endif
#endif
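
/* Worked example (the numbers are illustrative only): with
   ACCUMULATE_OUTGOING_ARGS nonzero, a current_function_outgoing_args_size
   of 16 bytes and a STACK_POINTER_OFFSET of 0, dynamically allocated
   objects are addressed starting 16 bytes above the stack pointer, i.e.
   STACK_DYNAMIC_OFFSET (fndecl) evaluates to 16.  */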
/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
   had its address taken.  DECL is the decl or SAVE_EXPR for the
   object stored in the register, for later use if we do need to force
   REG into the stack.  REG is overwritten by the MEM like in
   put_reg_into_stack.  RESCAN is true if previously emitted
   instructions must be rescanned and modified now that the REG has
   been transformed.  */
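
/* Shape of the result, as a sketch (the pseudo numbers are illustrative;
   the ADDRESSOF also records the original register number and DECL so
   that put_addressof_into_stack can recover them later): for an SImode
   pseudo (reg:SI 58) whose address is taken, the old rtx is turned in
   place into something like

       (mem:SI (addressof:SI (reg:SI 59) 58 <decl>))

   so existing references keep working, while the fresh (reg:SI 59) is
   available to hold the value if the ADDRESSOF is later purged.  */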
2890 gen_mem_addressof (reg
, decl
, rescan
)
2895 rtx r
= gen_rtx_ADDRESSOF (Pmode
, gen_reg_rtx (GET_MODE (reg
)),
2898 /* Calculate this before we start messing with decl's RTL. */
2899 HOST_WIDE_INT set
= decl
? get_alias_set (decl
) : 0;
2901 /* If the original REG was a user-variable, then so is the REG whose
2902 address is being taken. Likewise for unchanging. */
2903 REG_USERVAR_P (XEXP (r
, 0)) = REG_USERVAR_P (reg
);
2904 RTX_UNCHANGING_P (XEXP (r
, 0)) = RTX_UNCHANGING_P (reg
);
2906 PUT_CODE (reg
, MEM
);
2907 MEM_ATTRS (reg
) = 0;
2912 tree type
= TREE_TYPE (decl
);
2913 enum machine_mode decl_mode
2914 = (DECL_P (decl
) ? DECL_MODE (decl
) : TYPE_MODE (TREE_TYPE (decl
)));
2915 rtx decl_rtl
= (TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
)
2916 : DECL_RTL_IF_SET (decl
));
2918 PUT_MODE (reg
, decl_mode
);
2920 /* Clear DECL_RTL momentarily so functions below will work
2921 properly, then set it again. */
2922 if (DECL_P (decl
) && decl_rtl
== reg
)
2923 SET_DECL_RTL (decl
, 0);
2925 set_mem_attributes (reg
, decl
, 1);
2926 set_mem_alias_set (reg
, set
);
2928 if (DECL_P (decl
) && decl_rtl
== reg
)
2929 SET_DECL_RTL (decl
, reg
);
2932 && (TREE_USED (decl
) || (DECL_P (decl
) && DECL_INITIAL (decl
) != 0)))
2933 fixup_var_refs (reg
, GET_MODE (reg
), TREE_UNSIGNED (type
), reg
, 0);
2936 fixup_var_refs (reg
, GET_MODE (reg
), 0, reg
, 0);
2941 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2944 flush_addressof (decl
)
2947 if ((TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == VAR_DECL
)
2948 && DECL_RTL (decl
) != 0
2949 && GET_CODE (DECL_RTL (decl
)) == MEM
2950 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
2951 && GET_CODE (XEXP (XEXP (DECL_RTL (decl
), 0), 0)) == REG
)
2952 put_addressof_into_stack (XEXP (DECL_RTL (decl
), 0), 0);
2955 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2958 put_addressof_into_stack (r
, ht
)
2963 int volatile_p
, used_p
;
2965 rtx reg
= XEXP (r
, 0);
2967 if (GET_CODE (reg
) != REG
)
2970 decl
= ADDRESSOF_DECL (r
);
2973 type
= TREE_TYPE (decl
);
2974 volatile_p
= (TREE_CODE (decl
) != SAVE_EXPR
2975 && TREE_THIS_VOLATILE (decl
));
2976 used_p
= (TREE_USED (decl
)
2977 || (DECL_P (decl
) && DECL_INITIAL (decl
) != 0));
2986 put_reg_into_stack (0, reg
, type
, GET_MODE (reg
), GET_MODE (reg
),
2987 volatile_p
, ADDRESSOF_REGNO (r
), used_p
, ht
);
/* List of replacements made below in purge_addressof_1 when creating
   bitfield insertions.  */
static rtx purge_bitfield_addressof_replacements;

/* List of replacements made below in purge_addressof_1 for patterns
   (MEM (ADDRESSOF (REG ...))).  The key of the list entry is the
   corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
   the whole pattern.  The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
   enough in complex cases, e.g. when some field values can be
   extracted by using a MEM with a narrower mode.  */
static rtx purge_addressof_replacements;
/* Helper function for purge_addressof.  See if the rtx expression at *LOC
   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
   the stack.  If the function returns FALSE then the replacement could not
   be made.  If MAY_POSTPONE is true and we would not put the addressof
   to stack, postpone processing of the insn.  */
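
/* The common easy case, as a sketch (register numbers are illustrative):
   a reference through

       (mem:SI (addressof:SI (reg:SI 59) 58 <decl>))

   whose mode matches the register is simply replaced by (reg:SI 59) via
   validate_change; only when that fails, or when the modes differ, does
   the bitfield extraction/insertion machinery below come into play, or
   the ADDRESSOF get forced into a real stack slot.  */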
3009 purge_addressof_1 (loc
, insn
, force
, store
, may_postpone
, ht
)
3012 int force
, store
, may_postpone
;
3021 /* Re-start here to avoid recursion in common cases. */
3028 code
= GET_CODE (x
);
3030 /* If we don't return in any of the cases below, we will recurse inside
3031 the RTX, which will normally result in any ADDRESSOF being forced into
3035 result
= purge_addressof_1 (&SET_DEST (x
), insn
, force
, 1,
3037 result
&= purge_addressof_1 (&SET_SRC (x
), insn
, force
, 0,
3041 else if (code
== ADDRESSOF
)
3045 if (GET_CODE (XEXP (x
, 0)) != MEM
)
3046 put_addressof_into_stack (x
, ht
);
3048 /* We must create a copy of the rtx because it was created by
3049 overwriting a REG rtx which is always shared. */
3050 sub
= copy_rtx (XEXP (XEXP (x
, 0), 0));
3051 if (validate_change (insn
, loc
, sub
, 0)
3052 || validate_replace_rtx (x
, sub
, insn
))
3057 /* If SUB is a hard or virtual register, try it as a pseudo-register.
3058 Otherwise, perhaps SUB is an expression, so generate code to compute
3060 if (GET_CODE (sub
) == REG
&& REGNO (sub
) <= LAST_VIRTUAL_REGISTER
)
3061 sub
= copy_to_reg (sub
);
3063 sub
= force_operand (sub
, NULL_RTX
);
3065 if (! validate_change (insn
, loc
, sub
, 0)
3066 && ! validate_replace_rtx (x
, sub
, insn
))
3069 insns
= get_insns ();
3071 emit_insn_before (insns
, insn
);
3075 else if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == ADDRESSOF
&& ! force
)
3077 rtx sub
= XEXP (XEXP (x
, 0), 0);
3079 if (GET_CODE (sub
) == MEM
)
3080 sub
= adjust_address_nv (sub
, GET_MODE (x
), 0);
3081 else if (GET_CODE (sub
) == REG
3082 && (MEM_VOLATILE_P (x
) || GET_MODE (x
) == BLKmode
))
3084 else if (GET_CODE (sub
) == REG
&& GET_MODE (x
) != GET_MODE (sub
))
3086 int size_x
, size_sub
;
3090 /* Postpone for now, so that we do not emit bitfield arithmetics
3091 unless there is some benefit from it. */
3092 if (!postponed_insns
|| XEXP (postponed_insns
, 0) != insn
)
3093 postponed_insns
= alloc_INSN_LIST (insn
, postponed_insns
);
3099 /* When processing REG_NOTES look at the list of
3100 replacements done on the insn to find the register that X
3104 for (tem
= purge_bitfield_addressof_replacements
;
3106 tem
= XEXP (XEXP (tem
, 1), 1))
3107 if (rtx_equal_p (x
, XEXP (tem
, 0)))
3109 *loc
= XEXP (XEXP (tem
, 1), 0);
3113 /* See comment for purge_addressof_replacements. */
3114 for (tem
= purge_addressof_replacements
;
3116 tem
= XEXP (XEXP (tem
, 1), 1))
3117 if (rtx_equal_p (XEXP (x
, 0), XEXP (tem
, 0)))
3119 rtx z
= XEXP (XEXP (tem
, 1), 0);
3121 if (GET_MODE (x
) == GET_MODE (z
)
3122 || (GET_CODE (XEXP (XEXP (tem
, 1), 0)) != REG
3123 && GET_CODE (XEXP (XEXP (tem
, 1), 0)) != SUBREG
))
3126 /* It can happen that the note may speak of things
3127 in a wider (or just different) mode than the
3128 code did. This is especially true of
3131 if (GET_CODE (z
) == SUBREG
&& SUBREG_BYTE (z
) == 0)
3134 if (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
3135 && (GET_MODE_SIZE (GET_MODE (x
))
3136 > GET_MODE_SIZE (GET_MODE (z
))))
3138 /* This can occur as a result in invalid
3139 pointer casts, e.g. float f; ...
3140 *(long long int *)&f.
3141 ??? We could emit a warning here, but
3142 without a line number that wouldn't be
3144 z
= gen_rtx_SUBREG (GET_MODE (x
), z
, 0);
3147 z
= gen_lowpart (GET_MODE (x
), z
);
3153 /* When we are processing the REG_NOTES of the last instruction
3154 of a libcall, there will be typically no replacements
3155 for that insn; the replacements happened before, piecemeal
3156 fashion. OTOH we are not interested in the details of
3157 this for the REG_EQUAL note, we want to know the big picture,
3158 which can be succinctly described with a simple SUBREG.
3159 Note that removing the REG_EQUAL note is not an option
3160 on the last insn of a libcall, so we must do a replacement. */
3161 if (! purge_addressof_replacements
3162 && ! purge_bitfield_addressof_replacements
)
3164 /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
3166 (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
3167 [0 S8 A32]), which can be expressed with a simple
3169 if ((GET_MODE_SIZE (GET_MODE (x
))
3170 == GET_MODE_SIZE (GET_MODE (sub
)))
3171 /* Again, invalid pointer casts (as in
3172 compile/990203-1.c) can require paradoxical
3174 || (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
3175 && (GET_MODE_SIZE (GET_MODE (x
))
3176 > GET_MODE_SIZE (GET_MODE (sub
)))))
3178 *loc
= gen_rtx_SUBREG (GET_MODE (x
), sub
, 0);
3181 /* ??? Are there other cases we should handle? */
3183 /* Sometimes we may not be able to find the replacement. For
3184 example when the original insn was a MEM in a wider mode,
3185 and the note is part of a sign extension of a narrowed
3186 version of that MEM. Gcc testcase compile/990829-1.c can
3187 generate an example of this situation. Rather than complain
3188 we return false, which will prompt our caller to remove the
3193 size_x
= GET_MODE_BITSIZE (GET_MODE (x
));
3194 size_sub
= GET_MODE_BITSIZE (GET_MODE (sub
));
3196 /* Do not frob unchanging MEMs. If a later reference forces the
3197 pseudo to the stack, we can wind up with multiple writes to
3198 an unchanging memory, which is invalid. */
3199 if (RTX_UNCHANGING_P (x
) && size_x
!= size_sub
)
3202 /* Don't even consider working with paradoxical subregs,
3203 or the moral equivalent seen here. */
3204 else if (size_x
<= size_sub
3205 && int_mode_for_mode (GET_MODE (sub
)) != BLKmode
)
3207 /* Do a bitfield insertion to mirror what would happen
3214 rtx p
= PREV_INSN (insn
);
3217 val
= gen_reg_rtx (GET_MODE (x
));
3218 if (! validate_change (insn
, loc
, val
, 0))
3220 /* Discard the current sequence and put the
3221 ADDRESSOF on stack. */
3227 emit_insn_before (seq
, insn
);
3228 compute_insns_for_mem (p
? NEXT_INSN (p
) : get_insns (),
3232 store_bit_field (sub
, size_x
, 0, GET_MODE (x
),
3233 val
, GET_MODE_SIZE (GET_MODE (sub
)));
3235 /* Make sure to unshare any shared rtl that store_bit_field
3236 might have created. */
3237 unshare_all_rtl_again (get_insns ());
3241 p
= emit_insn_after (seq
, insn
);
3242 if (NEXT_INSN (insn
))
3243 compute_insns_for_mem (NEXT_INSN (insn
),
3244 p
? NEXT_INSN (p
) : NULL_RTX
,
3249 rtx p
= PREV_INSN (insn
);
3252 val
= extract_bit_field (sub
, size_x
, 0, 1, NULL_RTX
,
3253 GET_MODE (x
), GET_MODE (x
),
3254 GET_MODE_SIZE (GET_MODE (sub
)));
3256 if (! validate_change (insn
, loc
, val
, 0))
3258 /* Discard the current sequence and put the
3259 ADDRESSOF on stack. */
3266 emit_insn_before (seq
, insn
);
3267 compute_insns_for_mem (p
? NEXT_INSN (p
) : get_insns (),
3271 /* Remember the replacement so that the same one can be done
3272 on the REG_NOTES. */
3273 purge_bitfield_addressof_replacements
3274 = gen_rtx_EXPR_LIST (VOIDmode
, x
,
3277 purge_bitfield_addressof_replacements
));
3279 /* We replaced with a reg -- all done. */
3284 else if (validate_change (insn
, loc
, sub
, 0))
3286 /* Remember the replacement so that the same one can be done
3287 on the REG_NOTES. */
3288 if (GET_CODE (sub
) == REG
|| GET_CODE (sub
) == SUBREG
)
3292 for (tem
= purge_addressof_replacements
;
3294 tem
= XEXP (XEXP (tem
, 1), 1))
3295 if (rtx_equal_p (XEXP (x
, 0), XEXP (tem
, 0)))
3297 XEXP (XEXP (tem
, 1), 0) = sub
;
3300 purge_addressof_replacements
3301 = gen_rtx (EXPR_LIST
, VOIDmode
, XEXP (x
, 0),
3302 gen_rtx_EXPR_LIST (VOIDmode
, sub
,
3303 purge_addressof_replacements
));
3311 /* Scan all subexpressions. */
3312 fmt
= GET_RTX_FORMAT (code
);
3313 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3316 result
&= purge_addressof_1 (&XEXP (x
, i
), insn
, force
, 0,
3318 else if (*fmt
== 'E')
3319 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3320 result
&= purge_addressof_1 (&XVECEXP (x
, i
, j
), insn
, force
, 0,
3327 /* Return a hash value for K, a REG. */
3330 insns_for_mem_hash (k
)
3333 /* Use the address of the key for the hash value. */
3334 struct insns_for_mem_entry
*m
= (struct insns_for_mem_entry
*) k
;
3335 return htab_hash_pointer (m
->key
);
3338 /* Return nonzero if K1 and K2 (two REGs) are the same. */
3341 insns_for_mem_comp (k1
, k2
)
3345 struct insns_for_mem_entry
*m1
= (struct insns_for_mem_entry
*) k1
;
3346 struct insns_for_mem_entry
*m2
= (struct insns_for_mem_entry
*) k2
;
3347 return m1
->key
== m2
->key
;
3350 struct insns_for_mem_walk_info
3352 /* The hash table that we are using to record which INSNs use which
3356 /* The INSN we are currently processing. */
3359 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3360 to find the insns that use the REGs in the ADDRESSOFs. */
3364 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3365 that might be used in an ADDRESSOF expression, record this INSN in
3366 the hash table given by DATA (which is really a pointer to an
3367 insns_for_mem_walk_info structure). */
3370 insns_for_mem_walk (r
, data
)
3374 struct insns_for_mem_walk_info
*ifmwi
3375 = (struct insns_for_mem_walk_info
*) data
;
3376 struct insns_for_mem_entry tmp
;
3377 tmp
.insns
= NULL_RTX
;
3379 if (ifmwi
->pass
== 0 && *r
&& GET_CODE (*r
) == ADDRESSOF
3380 && GET_CODE (XEXP (*r
, 0)) == REG
)
3383 tmp
.key
= XEXP (*r
, 0);
3384 e
= htab_find_slot (ifmwi
->ht
, &tmp
, INSERT
);
3387 *e
= ggc_alloc (sizeof (tmp
));
3388 memcpy (*e
, &tmp
, sizeof (tmp
));
3391 else if (ifmwi
->pass
== 1 && *r
&& GET_CODE (*r
) == REG
)
3393 struct insns_for_mem_entry
*ifme
;
3395 ifme
= (struct insns_for_mem_entry
*) htab_find (ifmwi
->ht
, &tmp
);
3397 /* If we have not already recorded this INSN, do so now. Since
3398 we process the INSNs in order, we know that if we have
3399 recorded it it must be at the front of the list. */
3400 if (ifme
&& (!ifme
->insns
|| XEXP (ifme
->insns
, 0) != ifmwi
->insn
))
3401 ifme
->insns
= gen_rtx_EXPR_LIST (VOIDmode
, ifmwi
->insn
,
/* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
   which REGs in HT.  */
3412 compute_insns_for_mem (insns
, last_insn
, ht
)
3418 struct insns_for_mem_walk_info ifmwi
;
3421 for (ifmwi
.pass
= 0; ifmwi
.pass
< 2; ++ifmwi
.pass
)
3422 for (insn
= insns
; insn
!= last_insn
; insn
= NEXT_INSN (insn
))
3426 for_each_rtx (&insn
, insns_for_mem_walk
, &ifmwi
);
3430 /* Helper function for purge_addressof called through for_each_rtx.
3431 Returns true iff the rtl is an ADDRESSOF. */
3434 is_addressof (rtl
, data
)
3436 void *data ATTRIBUTE_UNUSED
;
3438 return GET_CODE (*rtl
) == ADDRESSOF
;
/* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
   (MEM (ADDRESSOF)) patterns, and force any needed registers into the
   stack.  */

purge_addressof (insns)

  /* When we actually purge ADDRESSOFs, we turn REGs into MEMs.  That
     requires a fixup pass over the instruction stream to correct
     INSNs that depended on the REG being a REG, and not a MEM.  But,
     these fixup passes are slow.  Furthermore, most MEMs are not
     mentioned in very many instructions.  So, we speed up the process
     by pre-calculating which REGs occur in which INSNs; that allows
     us to perform the fixup passes much more quickly.  */
3459 ht
= htab_create_ggc (1000, insns_for_mem_hash
, insns_for_mem_comp
, NULL
);
3460 compute_insns_for_mem (insns
, NULL_RTX
, ht
);
3462 postponed_insns
= NULL
;
3464 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
3467 if (! purge_addressof_1 (&PATTERN (insn
), insn
,
3468 asm_noperands (PATTERN (insn
)) > 0, 0, 1, ht
))
3469 /* If we could not replace the ADDRESSOFs in the insn,
3470 something is wrong. */
      if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, 0, ht))
3475 /* If we could not replace the ADDRESSOFs in the insn's notes,
3476 we can just remove the offending notes instead. */
3479 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
3481 /* If we find a REG_RETVAL note then the insn is a libcall.
3482 Such insns must have REG_EQUAL notes as well, in order
3483 for later passes of the compiler to work. So it is not
3484 safe to delete the notes here, and instead we abort. */
3485 if (REG_NOTE_KIND (note
) == REG_RETVAL
)
	    if (for_each_rtx (&note, is_addressof, NULL))
	      remove_note (insn, note);
3493 /* Process the postponed insns. */
3494 while (postponed_insns
)
3496 insn
= XEXP (postponed_insns
, 0);
3497 tmp
= postponed_insns
;
3498 postponed_insns
= XEXP (postponed_insns
, 1);
3499 free_INSN_LIST_node (tmp
);
3501 if (! purge_addressof_1 (&PATTERN (insn
), insn
,
3502 asm_noperands (PATTERN (insn
)) > 0, 0, 0, ht
))
3507 purge_bitfield_addressof_replacements
= 0;
3508 purge_addressof_replacements
= 0;
  /* REGs are shared.  purge_addressof will destructively replace a REG
     with a MEM, which creates shared MEMs.

     Unfortunately, the children of put_reg_into_stack assume that MEMs
     referring to the same stack slot are shared (fixup_var_refs and
     the associated hash table code).

     So, we have to do another unsharing pass after we have flushed any
     REGs that had their address taken into the stack.

     It may be worth tracking whether or not we converted any REGs into
     MEMs to avoid this overhead when it is not needed.  */
  unshare_all_rtl_again (get_insns ());

/* Convert a SET of a hard subreg to a set of the appropriate hard
   register.  A subroutine of purge_hard_subreg_sets.  */
3529 purge_single_hard_subreg_set (pattern
)
3532 rtx reg
= SET_DEST (pattern
);
3533 enum machine_mode mode
= GET_MODE (SET_DEST (pattern
));
3536 if (GET_CODE (reg
) == SUBREG
&& GET_CODE (SUBREG_REG (reg
)) == REG
3537 && REGNO (SUBREG_REG (reg
)) < FIRST_PSEUDO_REGISTER
)
3539 offset
= subreg_regno_offset (REGNO (SUBREG_REG (reg
)),
3540 GET_MODE (SUBREG_REG (reg
)),
3543 reg
= SUBREG_REG (reg
);
3547 if (GET_CODE (reg
) == REG
&& REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
3549 reg
= gen_rtx_REG (mode
, REGNO (reg
) + offset
);
3550 SET_DEST (pattern
) = reg
;
/* Eliminate all occurrences of SETs of hard subregs from INSNS.  The
   only such SETs that we expect to see are those left in because
   integrate can't handle sets of parts of a return value register.

   We don't use alter_subreg because we only want to eliminate subregs
   of hard registers.  */
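
/* Illustrative effect (the hard register numbers and word size are
   assumptions for the example): on a 32-bit target where a DImode return
   value lives in hard registers 0 and 1, a leftover

       (set (subreg:SI (reg:DI 0) 4) (reg:SI 60))

   is rewritten by purge_single_hard_subreg_set into

       (set (reg:SI 1) (reg:SI 60))

   using subreg_regno_offset to pick the correct hard register.  */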
3562 purge_hard_subreg_sets (insn
)
3565 for (; insn
; insn
= NEXT_INSN (insn
))
3569 rtx pattern
= PATTERN (insn
);
3570 switch (GET_CODE (pattern
))
3573 if (GET_CODE (SET_DEST (pattern
)) == SUBREG
)
3574 purge_single_hard_subreg_set (pattern
);
3579 for (j
= XVECLEN (pattern
, 0) - 1; j
>= 0; j
--)
3581 rtx inner_pattern
= XVECEXP (pattern
, 0, j
);
3582 if (GET_CODE (inner_pattern
) == SET
3583 && GET_CODE (SET_DEST (inner_pattern
)) == SUBREG
)
3584 purge_single_hard_subreg_set (inner_pattern
);
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */
3599 instantiate_virtual_regs (fndecl
, insns
)
3606 /* Compute the offsets to use for this function. */
3607 in_arg_offset
= FIRST_PARM_OFFSET (fndecl
);
3608 var_offset
= STARTING_FRAME_OFFSET
;
3609 dynamic_offset
= STACK_DYNAMIC_OFFSET (fndecl
);
3610 out_arg_offset
= STACK_POINTER_OFFSET
;
3611 cfa_offset
= ARG_POINTER_CFA_OFFSET (fndecl
);
3613 /* Scan all variables and parameters of this function. For each that is
3614 in memory, instantiate all virtual registers if the result is a valid
3615 address. If not, we do it later. That will handle most uses of virtual
3616 regs on many machines. */
3617 instantiate_decls (fndecl
, 1);
3619 /* Initialize recognition, indicating that volatile is OK. */
3622 /* Scan through all the insns, instantiating every virtual register still
3624 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
3625 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
3626 || GET_CODE (insn
) == CALL_INSN
)
3628 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
	if (INSN_DELETED_P (insn))
	  continue;
	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3632 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3633 if (GET_CODE (insn
) == CALL_INSN
)
3634 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn
),
3637 /* Past this point all ASM statements should match. Verify that
3638 to avoid failures later in the compilation process. */
3639 if (asm_noperands (PATTERN (insn
)) >= 0
3640 && ! check_asm_operands (PATTERN (insn
)))
3641 instantiate_virtual_regs_lossage (insn
);
3644 /* Instantiate the stack slots for the parm registers, for later use in
3645 addressof elimination. */
3646 for (i
= 0; i
< max_parm_reg
; ++i
)
3647 if (parm_reg_stack_loc
[i
])
3648 instantiate_virtual_regs_1 (&parm_reg_stack_loc
[i
], NULL_RTX
, 0);
3650 /* Now instantiate the remaining register equivalences for debugging info.
3651 These will not be valid addresses. */
3652 instantiate_decls (fndecl
, 0);
3654 /* Indicate that, from now on, assign_stack_local should use
3655 frame_pointer_rtx. */
3656 virtuals_instantiated
= 1;
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */
3666 instantiate_decls (fndecl
, valid_only
)
3672 /* Process all parameters of the function. */
3673 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
3675 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
3676 HOST_WIDE_INT size_rtl
;
3678 instantiate_decl (DECL_RTL (decl
), size
, valid_only
);
3680 /* If the parameter was promoted, then the incoming RTL mode may be
3681 larger than the declared type size. We must use the larger of
3683 size_rtl
= GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl
)));
3684 size
= MAX (size_rtl
, size
);
3685 instantiate_decl (DECL_INCOMING_RTL (decl
), size
, valid_only
);
3688 /* Now process all variables defined in the function or its subblocks. */
3689 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */
3696 instantiate_decls_1 (let
, valid_only
)
3702 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
3703 if (DECL_RTL_SET_P (t
))
3704 instantiate_decl (DECL_RTL (t
),
3705 int_size_in_bytes (TREE_TYPE (t
)),
3708 /* Process all subblocks. */
3709 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
3710 instantiate_decls_1 (t
, valid_only
);
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is nonzero, it means that the RTL should only be
   changed if the new address is valid.  */
3720 instantiate_decl (x
, size
, valid_only
)
3725 enum machine_mode mode
;
3728 /* If this is not a MEM, no need to do anything. Similarly if the
3729 address is a constant or a register that is not a virtual register. */
3731 if (x
== 0 || GET_CODE (x
) != MEM
)
3735 if (CONSTANT_P (addr
)
3736 || (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == REG
)
3737 || (GET_CODE (addr
) == REG
3738 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
3739 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
3742 /* If we should only do this if the address is valid, copy the address.
3743 We need to do this so we can undo any changes that might make the
3744 address invalid. This copy is unfortunate, but probably can't be
3748 addr
= copy_rtx (addr
);
3750 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
3752 if (valid_only
&& size
>= 0)
3754 unsigned HOST_WIDE_INT decl_size
= size
;
3756 /* Now verify that the resulting address is valid for every integer or
3757 floating-point mode up to and including SIZE bytes long. We do this
3758 since the object might be accessed in any mode and frame addresses
3761 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3762 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= decl_size
;
3763 mode
= GET_MODE_WIDER_MODE (mode
))
3764 if (! memory_address_p (mode
, addr
))
3767 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
3768 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= decl_size
;
3769 mode
= GET_MODE_WIDER_MODE (mode
))
3770 if (! memory_address_p (mode
, addr
))
3774 /* Put back the address now that we have updated it and we either know
3775 it is valid or we don't care whether it is valid. */
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */
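
/* For example (the numeric offset is illustrative): when X is
   virtual_stack_vars_rtx and the current function's var_offset is -16,
   this returns frame_pointer_rtx and stores -16 through POFFSET, so the
   caller can rewrite (plus virtual-stack-vars (const_int 8)) as
   (plus frame-pointer (const_int -8)).  */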
3785 instantiate_new_reg (x
, poffset
)
3787 HOST_WIDE_INT
*poffset
;
3790 HOST_WIDE_INT offset
;
3792 if (x
== virtual_incoming_args_rtx
)
3793 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3794 else if (x
== virtual_stack_vars_rtx
)
3795 new = frame_pointer_rtx
, offset
= var_offset
;
3796 else if (x
== virtual_stack_dynamic_rtx
)
3797 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3798 else if (x
== virtual_outgoing_args_rtx
)
3799 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3800 else if (x
== virtual_cfa_rtx
)
3801 new = arg_pointer_rtx
, offset
= cfa_offset
;
/* Called when instantiate_virtual_regs has failed to update the instruction.
   Usually this means that a non-matching instruction has been emitted;
   however, for asm statements the problem may lie in the constraints.  */
3814 instantiate_virtual_regs_lossage (insn
)
3817 if (asm_noperands (PATTERN (insn
)) >= 0)
3819 error_for_asm (insn
, "impossible constraint in `asm'");
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if replacement
   is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */
3840 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
3848 HOST_WIDE_INT offset
= 0;
3854 /* Re-start here to avoid recursion in common cases. */
3861 /* We may have detected and deleted invalid asm statements. */
3862 if (object
&& INSN_P (object
) && INSN_DELETED_P (object
))
3865 code
= GET_CODE (x
);
3867 /* Check for some special cases. */
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
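
      /* Sketch of the case handled here (the offset value is illustrative):
	 with a dynamic_offset of 32, an insn such as

	     (set (reg virtual-stack-dynamic) (reg:SI 70))

	 becomes a copy of the source minus the offset into the real
	 register,

	     (set (reg sp) (plus (reg:SI 70) (const_int -32)))

	 with any insns needed to form that sum emitted before OBJECT.  */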
3889 if ((new = instantiate_new_reg (SET_DEST (x
), &offset
)) != 0)
3891 rtx src
= SET_SRC (x
);
3893 /* We are setting the register, not using it, so the relevant
3894 offset is the negative of the offset to use were we using
3897 instantiate_virtual_regs_1 (&src
, NULL_RTX
, 0);
3899 /* The only valid sources here are PLUS or REG. Just do
3900 the simplest possible thing to handle them. */
3901 if (GET_CODE (src
) != REG
&& GET_CODE (src
) != PLUS
)
3903 instantiate_virtual_regs_lossage (object
);
3908 if (GET_CODE (src
) != REG
)
3909 temp
= force_operand (src
, NULL_RTX
);
3912 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
3916 emit_insn_before (seq
, object
);
3919 if (! validate_change (object
, &SET_SRC (x
), temp
, 0)
3921 instantiate_virtual_regs_lossage (object
);
3926 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
3931 /* Handle special case of virtual register plus constant. */
3932 if (CONSTANT_P (XEXP (x
, 1)))
3934 rtx old
, new_offset
;
3936 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3937 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
3939 if ((new = instantiate_new_reg (XEXP (XEXP (x
, 0), 0), &offset
)))
3941 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
3943 new = gen_rtx_PLUS (Pmode
, new, XEXP (XEXP (x
, 0), 1));
3952 #ifdef POINTERS_EXTEND_UNSIGNED
3953 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3954 we can commute the PLUS and SUBREG because pointers into the
3955 frame are well-behaved. */
3956 else if (GET_CODE (XEXP (x
, 0)) == SUBREG
&& GET_MODE (x
) == ptr_mode
3957 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3959 = instantiate_new_reg (SUBREG_REG (XEXP (x
, 0)),
3961 && validate_change (object
, loc
,
3962 plus_constant (gen_lowpart (ptr_mode
,
3965 + INTVAL (XEXP (x
, 1))),
3969 else if ((new = instantiate_new_reg (XEXP (x
, 0), &offset
)) == 0)
3971 /* We know the second operand is a constant. Unless the
3972 first operand is a REG (which has been already checked),
3973 it needs to be checked. */
3974 if (GET_CODE (XEXP (x
, 0)) != REG
)
3982 new_offset
= plus_constant (XEXP (x
, 1), offset
);
3984 /* If the new constant is zero, try to replace the sum with just
3986 if (new_offset
== const0_rtx
3987 && validate_change (object
, loc
, new, 0))
3990 /* Next try to replace the register and new offset.
3991 There are two changes to validate here and we can't assume that
3992 in the case of old offset equals new just changing the register
3993 will yield a valid insn. In the interests of a little efficiency,
3994 however, we only call validate change once (we don't queue up the
3995 changes and then call apply_change_group). */
3999 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
4000 : (XEXP (x
, 0) = new,
4001 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
4009 /* Otherwise copy the new constant into a register and replace
4010 constant with that register. */
4011 temp
= gen_reg_rtx (Pmode
);
4013 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
4014 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
4017 /* If that didn't work, replace this expression with a
4018 register containing the sum. */
4021 new = gen_rtx_PLUS (Pmode
, new, new_offset
);
4024 temp
= force_operand (new, NULL_RTX
);
4028 emit_insn_before (seq
, object
);
4029 if (! validate_change (object
, loc
, temp
, 0)
4030 && ! validate_replace_rtx (x
, temp
, object
))
4032 instantiate_virtual_regs_lossage (object
);
4041 /* Fall through to generic two-operand expression case. */
4047 case DIV
: case UDIV
:
4048 case MOD
: case UMOD
:
4049 case AND
: case IOR
: case XOR
:
4050 case ROTATERT
: case ROTATE
:
4051 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
4053 case GE
: case GT
: case GEU
: case GTU
:
4054 case LE
: case LT
: case LEU
: case LTU
:
4055 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
4056 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
         handled by our scan of decls.  The only special handling we
         need here is to make a copy of the rtx to ensure it isn't being
         shared if we have to change it to a pseudo.

         If the rtx is a simple reference to an address via a virtual register,
         it can potentially be shared.  In such cases, first try to make it
         a valid address, which can also be shared.  Otherwise, copy it and
         process the copy.

         First check for common cases that need no processing.  These are
         usually due to instantiation already being done on a previous instance
         of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          || temp == arg_pointer_rtx
#endif
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          || temp == hard_frame_pointer_rtx
#endif
          || temp == frame_pointer_rtx)
        return 1;

      if (GET_CODE (temp) == PLUS
          && CONSTANT_ADDRESS_P (XEXP (temp, 1))
          && (XEXP (temp, 0) == frame_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
              || XEXP (temp, 0) == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || XEXP (temp, 0) == arg_pointer_rtx
#endif
              ))
        return 1;

      if (temp == virtual_stack_vars_rtx
          || temp == virtual_incoming_args_rtx
          || (GET_CODE (temp) == PLUS
              && CONSTANT_ADDRESS_P (XEXP (temp, 1))
              && (XEXP (temp, 0) == virtual_stack_vars_rtx
                  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
        {
          /* This MEM may be shared.  If the substitution can be done without
             the need to generate new pseudos, we want to do it in place
             so all copies of the shared rtx benefit.  The call below will
             only make substitutions if the resulting address is still
             valid.

             Note that we cannot pass X as the object in the recursive call
             since the insn being processed may not allow all valid
             addresses.  However, if we were not passed an object, we can
             only modify X without copying it if X will have a valid
             address.

             ??? Also note that this can still lose if OBJECT is an insn that
             has fewer restrictions on an address than some other insn.
             In that case, we will modify the shared address.  This case
             doesn't seem very likely, though.  One case where this could
             happen is in the case of a USE or CLOBBER reference, but we
             take care of that below.  */

          if (instantiate_virtual_regs_1 (&XEXP (x, 0),
                                          object ? object : x, 0))
            return 1;

          /* Otherwise make a copy and process that copy.  We copy the entire
             RTL expression since it might be a PLUS which could also be
             shared.  */
          *loc = x = copy_rtx (x);
        }
      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case PRE_DEC:       case PRE_INC:   case POST_DEC:  case POST_INC:
    case SIGN_EXTEND:   case ZERO_EXTEND:
    case TRUNCATE:      case FLOAT_EXTEND:      case FLOAT_TRUNCATE:
    case FLOAT:         case FIX:
    case UNSIGNED_FIX:  case UNSIGNED_FLOAT:
    case POPCOUNT:      case PARITY:
      /* These cases either have just one operand or we know that we need not
         check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case USE:
    case CLOBBER:
      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
         go ahead and make the invalid one, but do it to a copy.  For a REG,
         just make the recursive call, since there's no chance of a problem.  */

      if ((GET_CODE (XEXP (x, 0)) == MEM
           && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
                                          0))
          || (GET_CODE (XEXP (x, 0)) == REG
              && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
        return 1;

      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
         in front of this insn and substitute the temporary.  */
      if ((new = instantiate_new_reg (x, &offset)) != 0)
        {
          temp = plus_constant (new, offset);
          if (!validate_change (object, loc, temp, 0))
            {
              temp = force_operand (temp, NULL_RTX);

              emit_insn_before (seq, object);
              if (! validate_change (object, loc, temp, 0)
                  && ! validate_replace_rtx (x, temp, object))
                instantiate_virtual_regs_lossage (object);
            }
        }
      return 1;

    case ADDRESSOF:
      if (GET_CODE (XEXP (x, 0)) == REG)
        return 1;

      else if (GET_CODE (XEXP (x, 0)) == MEM)
        {
          /* If we have a (addressof (mem ..)), do any instantiation inside
             since we know we'll be making the inside valid when we finally
             remove the ADDRESSOF.  */
          instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
          return 1;
        }
      break;
    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
        if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
          return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
                                          extra_insns))
          return 0;

  return 1;
}
/* Optimization: assuming this function does not receive nonlocal gotos,
   delete the handlers for such, as well as the insns to establish
   and disestablish them.  */

static void
delete_handlers ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Delete the handler by turning off the flag that would
         prevent jump_optimize from deleting it.
         Also permit deletion of the nonlocal labels themselves
         if nothing local refers to them.  */
      if (GET_CODE (insn) == CODE_LABEL)
        {
          tree t, last_t;

          LABEL_PRESERVE_P (insn) = 0;

          /* Remove it from the nonlocal_label list, to avoid confusing
             flow.  */
          for (t = nonlocal_labels, last_t = 0; t;
               last_t = t, t = TREE_CHAIN (t))
            if (DECL_RTL (TREE_VALUE (t)) == insn)
              break;
          if (t)
            {
              if (! last_t)
                nonlocal_labels = TREE_CHAIN (nonlocal_labels);
              else
                TREE_CHAIN (last_t) = TREE_CHAIN (t);
            }
        }
      if (GET_CODE (insn) == INSN)
        {
          int can_delete = 0;
          rtx t;

          for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
            if (reg_mentioned_p (t, PATTERN (insn)))
              {
                can_delete = 1;
                break;
              }
          if (can_delete
              || (nonlocal_goto_stack_level != 0
                  && reg_mentioned_p (nonlocal_goto_stack_level,
                                      PATTERN (insn))))
            delete_related_insns (insn);
        }
    }
}
/* Return the first insn following those generated by `assign_parms'.  */

rtx
get_first_nonparm_insn ()
{
  if (last_parm_insn)
    return NEXT_INSN (last_parm_insn);
  return get_insns ();
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (exp)
     tree exp;
{
  int i, regno, nregs;
  rtx reg;

  tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;
  if (RETURN_IN_MEMORY (type))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (GET_CODE (reg) != REG)
    return 0;

  regno = REGNO (reg);
  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
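
/* Illustrative sketch (not part of GCC): the loop above checks every hard
   register the return value would occupy.  On most targets that register
   count is just the value's size rounded up to whole words, i.e. the
   ceiling division below; the names are hypothetical.  */

static int
demo_regs_needed (int size_in_bytes, int word_size_in_bytes)
{
  return (size_in_bytes + word_size_in_bytes - 1) / word_size_in_bytes;
}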
/* Assign RTL expressions to the function's parameters.
   This may involve copying them into registers and using
   those registers as the RTL for them.  */

void
assign_parms (fndecl)
     tree fndecl;
{
  tree parm;
  CUMULATIVE_ARGS args_so_far;
  /* Total space needed so far for args on the stack,
     given as a constant and a tree-expression.  */
  struct args_size stack_args_size;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);
  /* This is used for the arg pointer when referring to stack args.  */
  rtx internal_arg_pointer;
  /* This is a dummy PARM_DECL that we used for the function result if
     the function returns a structure.  */
  tree function_result_decl = 0;
#ifdef SETUP_INCOMING_VARARGS
  int varargs_setup = 0;
#endif
  int reg_parm_stack_space = 0;
  rtx conversion_insns = 0;

  /* Nonzero if function takes extra anonymous args.
     This means the last named arg must be on the stack
     right before the anonymous ones.  */
  int stdarg
    = (TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
           != void_type_node));

  current_function_stdarg = stdarg;
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
             || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  stack_args_size.constant = 0;
  stack_args_size.var = 0;
  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl))
      && ! current_function_returns_pcc_struct
      && struct_value_incoming_rtx == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));

      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);

      DECL_ARG_TYPE (function_result_decl) = type;
      TREE_CHAIN (function_result_decl) = fnargs;
      fnargs = function_result_decl;
    }

  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl);
#endif

  /* We haven't yet found an argument that we must push and pretend the
     caller did.  */
  current_function_pretend_args_size = 0;
  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      enum machine_mode promoted_mode, passed_mode;
      enum machine_mode nominal_mode, promoted_nominal_mode;
      struct locate_and_pad_arg_data locate;
      int passed_pointer = 0;
      int did_conversion = 0;
      tree passed_type = DECL_ARG_TYPE (parm);
      tree nominal_type = TREE_TYPE (parm);
      int last_named = 0, named_arg;
      /* Set LAST_NAMED if this is last named arg before last
         anonymous args.  */
      if (stdarg)
        {
          tree tem;

          for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
            if (DECL_NAME (tem))
              break;

          if (tem == 0)
            last_named = 1;
        }

      /* Set NAMED_ARG if this arg should be treated as a named arg.  For
         most machines, if this is a varargs/stdarg function, then we treat
         the last named arg as if it were anonymous too.  */
      named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;

      if (TREE_TYPE (parm) == error_mark_node
          /* This can happen after weird syntax errors
             or if an enum type is defined among the parms.  */
          || TREE_CODE (parm) != PARM_DECL
          || passed_type == NULL)
        {
          SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          TREE_USED (parm) = 1;
          continue;
        }
      /* Find mode of arg as it is passed, and mode of arg
         as it should be during execution of this function.  */
      passed_mode = TYPE_MODE (passed_type);
      nominal_mode = TYPE_MODE (nominal_type);

      /* If the parm's mode is VOID, its value doesn't matter,
         and avoid the usual things like emit_move_insn that could crash.  */
      if (nominal_mode == VOIDmode)
        {
          SET_DECL_RTL (parm, const0_rtx);
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          continue;
        }

      /* If the parm is to be passed as a transparent union, use the
         type of the first field for the tests below.  We have already
         verified that the modes are the same.  */
      if (DECL_TRANSPARENT_UNION (parm)
          || (TREE_CODE (passed_type) == UNION_TYPE
              && TYPE_TRANSPARENT_UNION (passed_type)))
        passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
      /* See if this arg was passed by invisible reference.  It is if
         it is an object whose size depends on the contents of the
         object itself or if the machine requires these objects be passed
         that way.  */

      if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
           && contains_placeholder_p (TYPE_SIZE (passed_type)))
          || TREE_ADDRESSABLE (passed_type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
          || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
                                             passed_type, named_arg)
#endif
          )
        {
          passed_type = nominal_type = build_pointer_type (passed_type);
          passed_pointer = 1;
          passed_mode = nominal_mode = Pmode;
        }
      /* See if the frontend wants to pass this by invisible reference.  */
      else if (passed_type != nominal_type
               && POINTER_TYPE_P (passed_type)
               && TREE_TYPE (passed_type) == nominal_type)
        {
          nominal_type = passed_type;
          passed_pointer = 1;
          passed_mode = nominal_mode = Pmode;
        }

      promoted_mode = passed_mode;

#ifdef PROMOTE_FUNCTION_ARGS
      /* Compute the mode to which the arg is actually extended.  */
      unsignedp = TREE_UNSIGNED (passed_type);
      promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
#endif
      /* Let machine desc say which reg (if any) the parm arrives in.
         0 means it arrives on the stack.  */
#ifdef FUNCTION_INCOMING_ARG
      entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
                                          passed_type, named_arg);
#else
      entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
                                 passed_type, named_arg);
#endif

      if (entry_parm == 0)
        promoted_mode = passed_mode;

#ifdef SETUP_INCOMING_VARARGS
      /* If this is the last named parameter, do any required setup for
         varargs or stdargs.  We need to know about the case of this being an
         addressable type, in which case we skip the registers it
         would have arrived in.

         For stdargs, LAST_NAMED will be set for two parameters, the one that
         is actually the last named, and the dummy parameter.  We only
         want to do this action once.

         Also, indicate when RTL generation is to be suppressed.  */
      if (last_named && !varargs_setup)
        {
          SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
                                  current_function_pretend_args_size, 0);
          varargs_setup = 1;
        }
#endif
      /* Determine parm's home in the stack,
         in case it arrives in the stack or we should pretend it did.

         Compute the stack position and rtx where the argument arrives
         and its size.

         There is one complexity here:  If this was a parameter that would
         have been passed in registers, but wasn't only because it is
         __builtin_va_alist, we want locate_and_pad_parm to treat it as if
         it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
         In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
         0 as it was the previous time.  */
      in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
      in_regs = 1;
#endif
      if (!in_regs && !named_arg)
        {
          int pretend_named = PRETEND_OUTGOING_VARARGS_NAMED;

          if (pretend_named)
            {
#ifdef FUNCTION_INCOMING_ARG
              in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
                                               passed_type,
                                               pretend_named) != 0;
#else
              in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
                                      passed_type,
                                      pretend_named) != 0;
#endif
            }
        }

      /* If this parameter was passed both in registers and in the stack,
         use the copy on the stack.  */
      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
        entry_parm = 0;

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (entry_parm)
        partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
                                              passed_type, named_arg);
#endif

      memset (&locate, 0, sizeof (locate));
      locate_and_pad_parm (promoted_mode, passed_type, in_regs,
                           entry_parm ? partial : 0, fndecl,
                           &stack_args_size, &locate);
      {
        rtx offset_rtx;

        /* If we're passing this arg using a reg, make its stack home
           the aligned stack slot.  */
        if (entry_parm)
          offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
        else
          offset_rtx = ARGS_SIZE_RTX (locate.offset);

        if (offset_rtx == const0_rtx)
          stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
        else
          stack_parm = gen_rtx_MEM (promoted_mode,
                                    gen_rtx_PLUS (Pmode,
                                                  internal_arg_pointer,
                                                  offset_rtx));

        set_mem_attributes (stack_parm, parm, 1);

        /* Set also REG_ATTRS if parameter was passed in a register.  */
        if (entry_parm)
          set_reg_attrs_for_parm (entry_parm, stack_parm);
      }
      /* If this parm was passed part in regs and part in memory,
         pretend it arrived entirely in memory
         by pushing the register-part onto the stack.

         In the special case of a DImode or DFmode that is split,
         we could put it together in a pseudoreg directly,
         but for now that's not worth bothering with.  */

      if (partial)
        {
#ifndef MAYBE_REG_PARM_STACK_SPACE
          /* When REG_PARM_STACK_SPACE is nonzero, stack space for
             split parameters was allocated by our caller, so we
             won't be pushing it in the prolog.  */
          if (reg_parm_stack_space == 0)
#endif
            current_function_pretend_args_size
              = (((partial * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT)
                 * (PARM_BOUNDARY / BITS_PER_UNIT));

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (entry_parm) == PARALLEL)
            emit_group_store (validize_mem (stack_parm), entry_parm,
                              int_size_in_bytes (TREE_TYPE (parm)));
          else
            move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
                                 partial);

          entry_parm = stack_parm;
        }

      /* If we didn't decide this parm came in a register,
         by default it came on the stack.  */
      if (entry_parm == 0)
        entry_parm = stack_parm;

      /* Record permanently how this parm was passed.  */
      DECL_INCOMING_RTL (parm) = entry_parm;
      /* If there is actually space on the stack for this parm,
         count it in stack_args_size; otherwise set stack_parm to 0
         to indicate there is no preallocated stack slot for the parm.  */

      if (entry_parm == stack_parm
          || (GET_CODE (entry_parm) == PARALLEL
              && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
          /* On some machines, even if a parm value arrives in a register
             there is still an (uninitialized) stack slot allocated for it.

             ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
             whether this parameter already has a stack slot allocated,
             because an arg block exists only if current_function_args_size
             is larger than some threshold, and we haven't calculated that
             yet.  So, for now, we just assume that stack slots never exist
             in this case.  */
          || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
          )
        {
          stack_args_size.constant += locate.size.constant;
          /* locate.size doesn't include the part in regs.  */
          stack_args_size.constant += current_function_pretend_args_size;
          if (locate.size.var)
            ADD_PARM_SIZE (stack_args_size, locate.size.var);
        }
      else
        /* No stack slot was pushed for this parm.  */
        stack_parm = 0;

      /* Update info on where next arg arrives in registers.  */

      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
                            passed_type, named_arg);

      /* If we can't trust the parm stack slot to be aligned enough
         for its ultimate type, don't use that slot after entry.
         We'll make another stack slot, if we need one.  */
      {
        unsigned int thisparm_boundary
          = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);

        if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
          stack_parm = 0;
      }
      /* If parm was passed in memory, and we need to convert it on entry,
         don't store it back in that same slot.  */
      if (entry_parm == stack_parm
          && nominal_mode != BLKmode && nominal_mode != passed_mode)
        stack_parm = 0;

      /* When an argument is passed in multiple locations, we can't
         make use of this information, but we can save some copying if
         the whole argument is passed in a single register.  */
      if (GET_CODE (entry_parm) == PARALLEL
          && nominal_mode != BLKmode && passed_mode != BLKmode)
        {
          int i, len = XVECLEN (entry_parm, 0);

          for (i = 0; i < len; i++)
            if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
                && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
                && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                    == passed_mode)
                && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
              {
                entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
                DECL_INCOMING_RTL (parm) = entry_parm;
                break;
              }
        }
      /* ENTRY_PARM is an RTX for the parameter as it arrives,
         in the mode in which it arrives.
         STACK_PARM is an RTX for a stack slot where the parameter can live
         during the function (in case we want to put it there).
         STACK_PARM is 0 if no stack slot was pushed for it.

         Now output code if necessary to convert ENTRY_PARM to
         the type in which this function declares it,
         and store that result in an appropriate place,
         which may be a pseudo reg, may be STACK_PARM,
         or may be a local stack slot if STACK_PARM is 0.

         Set DECL_RTL to that place.  */

      if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
        {
          /* If a BLKmode arrives in registers, copy it to a stack slot.
             Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (entry_parm) == REG
              || GET_CODE (entry_parm) == PARALLEL)
            {
              int size = int_size_in_bytes (TREE_TYPE (parm));
              int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
              rtx mem;

              /* Note that we will be storing an integral number of words.
                 So we have to be careful to ensure that we allocate an
                 integral number of words.  We do this below in the
                 assign_stack_local if space was not allocated in the argument
                 list.  If it was, this will not work if PARM_BOUNDARY is not
                 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
                 if it becomes a problem.  */

              if (stack_parm == 0)
                {
                  stack_parm
                    = assign_stack_local (GET_MODE (entry_parm),
                                          size_stored, 0);
                  set_mem_attributes (stack_parm, parm, 1);
                }
              else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
                abort ();

              mem = validize_mem (stack_parm);
              /* Handle calls that pass values in multiple non-contiguous
                 locations.  The Irix 6 ABI has examples of this.  */
              if (GET_CODE (entry_parm) == PARALLEL)
                emit_group_store (mem, entry_parm, size);

              /* If SIZE is that of a mode no bigger than a word, just use
                 that mode's store operation.  */
              else if (size <= UNITS_PER_WORD)
                {
                  enum machine_mode mode
                    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

                  if (mode != BLKmode)
                    {
                      rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
                      emit_move_insn (change_address (mem, mode, 0), reg);
                    }

                  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
                     machine must be aligned to the left before storing
                     to memory.  Note that the previous test doesn't
                     handle all cases (e.g. SIZE == 3).  */
                  else if (size != UNITS_PER_WORD
                           && BYTES_BIG_ENDIAN)
                    {
                      rtx tem, x;
                      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

                      x = expand_binop (word_mode, ashl_optab, reg,
                                        GEN_INT (by), 0, 1, OPTAB_WIDEN);
                      tem = change_address (mem, word_mode, 0);
                      emit_move_insn (tem, x);
                    }
                  else
                    move_block_from_reg (REGNO (entry_parm), mem,
                                         size_stored / UNITS_PER_WORD);
                }
              else
                move_block_from_reg (REGNO (entry_parm), mem,
                                     size_stored / UNITS_PER_WORD);
            }
          SET_DECL_RTL (parm, stack_parm);
        }
      else if (! ((! optimize
                   && ! DECL_REGISTER (parm))
                  || TREE_SIDE_EFFECTS (parm)
                  /* If -ffloat-store specified, don't put explicit
                     float variables into registers.  */
                  || (flag_float_store
                      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
               /* Always assign pseudo to structure return or item passed
                  by invisible reference.  */
               || passed_pointer || parm == function_result_decl)
        {
          /* Store the parm in a pseudoregister during the function, but we
             may need to do it in a wider mode.  */

          rtx parmreg;
          unsigned int regno, regnoi = 0, regnor = 0;

          unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));

          promoted_nominal_mode
            = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);

          parmreg = gen_reg_rtx (promoted_nominal_mode);
          mark_user_reg (parmreg);

          /* If this was an item that we received a pointer to, set DECL_RTL
             appropriately.  */
          if (passed_pointer)
            {
              rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
                                   parmreg);
              set_mem_attributes (x, parm, 1);
              SET_DECL_RTL (parm, x);
            }
          else
            {
              SET_DECL_RTL (parm, parmreg);
              maybe_set_unchanging (DECL_RTL (parm), parm);
            }
          /* Copy the value into the register.  */
          if (nominal_mode != passed_mode
              || promoted_nominal_mode != promoted_mode)
            {
              /* ENTRY_PARM has been converted to PROMOTED_MODE, its
                 mode, by the caller.  We now have to convert it to
                 NOMINAL_MODE, if different.  However, PARMREG may be in
                 a different mode than NOMINAL_MODE if it is being stored
                 promoted.

                 If ENTRY_PARM is a hard register, it might be in a register
                 not valid for operating in its mode (e.g., an odd-numbered
                 register for a DFmode).  In that case, moves are the only
                 thing valid, so we can't do a convert from there.  This
                 occurs when the calling sequence allow such misaligned
                 usages.

                 In addition, the conversion may involve a call, which could
                 clobber parameters which haven't been copied to pseudo
                 registers yet.  Therefore, we must first copy the parm to
                 a pseudo reg here, and save the conversion until after all
                 parameters have been moved.  */

              rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

              emit_move_insn (tempreg, validize_mem (entry_parm));

              push_to_sequence (conversion_insns);
              tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);

              if (GET_CODE (tempreg) == SUBREG
                  && GET_MODE (tempreg) == nominal_mode
                  && GET_CODE (SUBREG_REG (tempreg)) == REG
                  && nominal_mode == passed_mode
                  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
                  && GET_MODE_SIZE (GET_MODE (tempreg))
                     < GET_MODE_SIZE (GET_MODE (entry_parm)))
                {
                  /* The argument is already sign/zero extended, so note it
                     into the subreg.  */
                  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
                  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
                }

              /* TREE_USED gets set erroneously during expand_assignment.  */
              save_tree_used = TREE_USED (parm);
              expand_assignment (parm,
                                 make_tree (nominal_type, tempreg), 0, 0);
              TREE_USED (parm) = save_tree_used;
              conversion_insns = get_insns ();
              end_sequence ();

              did_conversion = 1;
            }
          else
            emit_move_insn (parmreg, validize_mem (entry_parm));
          /* If we were passed a pointer but the actual value
             can safely live in a register, put it in one.  */
          if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
              /* If by-reference argument was promoted, demote it.  */
              && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
                  || ! ((! optimize
                         && ! DECL_REGISTER (parm))
                        || TREE_SIDE_EFFECTS (parm)
                        /* If -ffloat-store specified, don't put explicit
                           float variables into registers.  */
                        || (flag_float_store
                            && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
            {
              /* We can't use nominal_mode, because it will have been set to
                 Pmode above.  We must use the actual mode of the parm.  */
              parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
              mark_user_reg (parmreg);
              if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
                {
                  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
                  int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));

                  push_to_sequence (conversion_insns);
                  emit_move_insn (tempreg, DECL_RTL (parm));
                  SET_DECL_RTL (parm,
                                convert_to_mode (GET_MODE (parmreg),
                                                 tempreg, unsigned_p));
                  emit_move_insn (parmreg, DECL_RTL (parm));
                  conversion_insns = get_insns();
                  end_sequence ();

                  did_conversion = 1;
                }
              else
                emit_move_insn (parmreg, DECL_RTL (parm));

              SET_DECL_RTL (parm, parmreg);

              /* STACK_PARM is the pointer, not the parm, and PARMREG is
                 now the parm.  */
              stack_parm = 0;
            }
#ifdef FUNCTION_ARG_CALLEE_COPIES
          /* If we are passed an arg by reference and it is our responsibility
             to make a copy, do it now.
             PASSED_TYPE and PASSED mode now refer to the pointer, not the
             original argument, so we must recreate them in the call to
             FUNCTION_ARG_CALLEE_COPIES.  */
          /* ??? Later add code to handle the case that if the argument isn't
             modified, don't do the copy.  */

          else if (passed_pointer
                   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
                                                  TYPE_MODE (DECL_ARG_TYPE (parm)),
                                                  DECL_ARG_TYPE (parm),
                                                  named_arg)
                   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
            {
              rtx copy;
              tree type = DECL_ARG_TYPE (parm);

              /* This sequence may involve a library call perhaps clobbering
                 registers that haven't been copied to pseudos yet.  */

              push_to_sequence (conversion_insns);

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                /* This is a variable sized object.  */
                copy = gen_rtx_MEM (BLKmode,
                                    allocate_dynamic_stack_space
                                    (expr_size (parm), NULL_RTX,
                                     TYPE_ALIGN (type)));
              else
                copy = assign_stack_temp (TYPE_MODE (type),
                                          int_size_in_bytes (type), 1);
              set_mem_attributes (copy, parm, 1);

              store_expr (parm, copy, 0);
              emit_move_insn (parmreg, XEXP (copy, 0));
              conversion_insns = get_insns ();
              end_sequence ();

              did_conversion = 1;
            }
#endif /* FUNCTION_ARG_CALLEE_COPIES */
          /* In any case, record the parm's desired stack location
             in case we later discover it must live in the stack.

             If it is a COMPLEX value, store the stack location for both
             parts.  */

          if (GET_CODE (parmreg) == CONCAT)
            regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
          else
            regno = REGNO (parmreg);

          if (regno >= max_parm_reg)
            {
              rtx *new;
              int old_max_parm_reg = max_parm_reg;

              /* It's slow to expand this one register at a time,
                 but it's also rare and we need max_parm_reg to be
                 precisely correct.  */
              max_parm_reg = regno + 1;
              new = (rtx *) ggc_realloc (parm_reg_stack_loc,
                                         max_parm_reg * sizeof (rtx));
              memset ((char *) (new + old_max_parm_reg), 0,
                      (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
              parm_reg_stack_loc = new;
            }

          if (GET_CODE (parmreg) == CONCAT)
            {
              enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));

              regnor = REGNO (gen_realpart (submode, parmreg));
              regnoi = REGNO (gen_imagpart (submode, parmreg));

              if (stack_parm != 0)
                {
                  parm_reg_stack_loc[regnor]
                    = gen_realpart (submode, stack_parm);
                  parm_reg_stack_loc[regnoi]
                    = gen_imagpart (submode, stack_parm);
                }
              else
                {
                  parm_reg_stack_loc[regnor] = 0;
                  parm_reg_stack_loc[regnoi] = 0;
                }
            }
          else
            parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
          /* Mark the register as eliminable if we did no conversion
             and it was copied from memory at a fixed offset,
             and the arg pointer was not copied to a pseudo-reg.
             If the arg pointer is a pseudo reg or the offset formed
             an invalid address, such memory-equivalences
             as we make here would screw up life analysis for it.  */
          if (nominal_mode == passed_mode
              && ! did_conversion
              && stack_parm != 0
              && GET_CODE (stack_parm) == MEM
              && locate.offset.var == 0
              && reg_mentioned_p (virtual_incoming_args_rtx,
                                  XEXP (stack_parm, 0)))
            {
              rtx linsn = get_last_insn ();
              rtx sinsn, set;

              /* Mark complex types separately.  */
              if (GET_CODE (parmreg) == CONCAT)
                /* Scan backwards for the set of the real and
                   imaginary parts.  */
                for (sinsn = linsn; sinsn != 0;
                     sinsn = prev_nonnote_insn (sinsn))
                  {
                    set = single_set (sinsn);
                    if (set != 0
                        && SET_DEST (set) == regno_reg_rtx[regnoi])
                      REG_NOTES (sinsn)
                        = gen_rtx_EXPR_LIST (REG_EQUIV,
                                             parm_reg_stack_loc[regnoi],
                                             REG_NOTES (sinsn));
                    else if (set != 0
                             && SET_DEST (set) == regno_reg_rtx[regnor])
                      REG_NOTES (sinsn)
                        = gen_rtx_EXPR_LIST (REG_EQUIV,
                                             parm_reg_stack_loc[regnor],
                                             REG_NOTES (sinsn));
                  }
              else if ((set = single_set (linsn)) != 0
                       && SET_DEST (set) == parmreg)
                REG_NOTES (linsn)
                  = gen_rtx_EXPR_LIST (REG_EQUIV,
                                       stack_parm, REG_NOTES (linsn));
            }

          /* For pointer data type, suggest pointer register.  */
          if (POINTER_TYPE_P (TREE_TYPE (parm)))
            mark_reg_pointer (parmreg,
                              TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

          /* If something wants our address, try to use ADDRESSOF.  */
          if (TREE_ADDRESSABLE (parm))
            {
              /* If we end up putting something into the stack,
                 fixup_var_refs_insns will need to make a pass over
                 all the instructions.  It looks through the pending
                 sequences -- but it can't see the ones in the
                 CONVERSION_INSNS, if they're not on the sequence
                 stack.  So, we go back to that sequence, just so that
                 the fixups will happen.  */
              push_to_sequence (conversion_insns);
              put_var_into_stack (parm, /*rescan=*/true);
              conversion_insns = get_insns ();
              end_sequence ();
            }
        }
      else
        {
          /* Value must be stored in the stack slot STACK_PARM
             during function execution.  */

          if (promoted_mode != nominal_mode)
            {
              /* Conversion is required.  */
              rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

              emit_move_insn (tempreg, validize_mem (entry_parm));

              push_to_sequence (conversion_insns);
              entry_parm = convert_to_mode (nominal_mode, tempreg,
                                            TREE_UNSIGNED (TREE_TYPE (parm)));
              if (stack_parm)
                /* ??? This may need a big-endian conversion on sparc64.  */
                stack_parm = adjust_address (stack_parm, nominal_mode, 0);

              conversion_insns = get_insns ();
              end_sequence ();
              did_conversion = 1;
            }

          if (entry_parm != stack_parm)
            {
              if (stack_parm == 0)
                {
                  stack_parm
                    = assign_stack_local (GET_MODE (entry_parm),
                                          GET_MODE_SIZE (GET_MODE (entry_parm)),
                                          0);
                  set_mem_attributes (stack_parm, parm, 1);
                }

              if (promoted_mode != nominal_mode)
                {
                  push_to_sequence (conversion_insns);
                  emit_move_insn (validize_mem (stack_parm),
                                  validize_mem (entry_parm));
                  conversion_insns = get_insns ();
                  end_sequence ();
                }
              else
                emit_move_insn (validize_mem (stack_parm),
                                validize_mem (entry_parm));
            }

          SET_DECL_RTL (parm, stack_parm);
        }
    }
  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (conversion_insns);

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result. As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (function_result_decl)
    {
      tree result = DECL_RESULT (fndecl);
      rtx addr = DECL_RTL (function_result_decl);
      rtx x;

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (addr) != Pmode)
        addr = convert_memory_address (Pmode, addr);
#endif

      x = gen_rtx_MEM (DECL_MODE (result), addr);
      set_mem_attributes (x, result, 1);
      SET_DECL_RTL (result, x);
    }
  last_parm_insn = get_last_insn ();

  current_function_args_size = stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum size.  */

#ifdef REG_PARM_STACK_SPACE
#ifndef MAYBE_REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
                                    REG_PARM_STACK_SPACE (fndecl));
#endif
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;

#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
       : expand_expr (size_diffop (stack_args_size.var,
                                   size_int (-stack_args_size.constant)),
                      NULL_RTX, VOIDmode, 0));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
                                                 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  current_function_args_info = args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl;

#ifdef FUNCTION_OUTGOING_VALUE
          real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
                                                   fndecl);
#else
          real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
                                          fndecl);
#endif
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
          /* The delay slot scheduler assumes that current_function_return_rtx
             holds the hard register containing the return value, not a
             temporary pseudo.  */
          current_function_return_rtx = real_decl_rtl;
        }
    }
}
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

#ifdef PROMOTE_FUNCTION_ARGS

rtx
promoted_input_arg (regno, pmode, punsignedp)
     unsigned int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
        && REGNO (DECL_INCOMING_RTL (arg)) == regno
        && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
        int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));

        mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
        if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
            && mode != DECL_MODE (arg))
          {
            *pmode = DECL_MODE (arg);
            *punsignedp = unsignedp;
            return DECL_INCOMING_RTL (arg);
          }
      }

  return 0;
}

#endif
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specific boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (passed_mode, type, in_regs, partial, fndecl,
                     initial_offset_ptr, locate)
     enum machine_mode passed_mode;
     tree type;
     int in_regs;
     int partial;
     tree fndecl ATTRIBUTE_UNUSED;
     struct args_size *initial_offset_ptr;
     struct locate_and_pad_arg_data *locate;
{
  tree sizetree;
  enum direction where_pad;
  int boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              ssize_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = 0;
  if (reg_parm_stack_space == 0)
    part_size_in_regs = ((partial * UNITS_PER_WORD)
                         / (PARM_BOUNDARY / BITS_PER_UNIT)
                         * (PARM_BOUNDARY / BITS_PER_UNIT));

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
                                          initial_offset_ptr->var);

  {
    tree s2 = sizetree;

    if (where_pad != none
        && (!host_integerp (sizetree, 1)
            || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
                          &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
                           - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
                                   size_binop (MINUS_EXPR,
                                               ssize_int (0),
                                               initial_offset_ptr->var),
                                   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
                          &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
          || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */
}
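
/* Illustrative sketch (not part of GCC): a worked instance of the two
   roundings described above, with hypothetical numbers.  Suppose the
   argument boundary is 8 bytes, the parm boundary is 4 bytes, the offset so
   far is 10 bytes and the argument is 6 bytes long.  The slot offset is
   first aligned up to 16 (the 8-byte boundary) and the size is then rounded
   up to 8 (a multiple of the 4-byte parm boundary).  */

static void
demo_locate_and_pad (int offset_so_far, int size,
                     int arg_boundary, int parm_boundary,
                     int *slot_offset, int *rounded_size)
{
  /* First rounding: align the slot offset; the size is unaffected.  */
  *slot_offset = ((offset_so_far + arg_boundary - 1)
                  / arg_boundary) * arg_boundary;

  /* Second rounding: pad the size out to the parm boundary.  */
  *rounded_size = ((size + parm_boundary - 1)
                   / parm_boundary) * parm_boundary;
}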
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
     struct args_size *offset_ptr;
     int boundary;
     struct args_size *alignment_pad;
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
            round_down
#else
            round_up
#endif
              (ARGS_SIZE_TREE (*offset_ptr),
               boundary / BITS_PER_UNIT);
          /* ARGS_SIZE_TREE includes constant term.  */
          offset_ptr->constant = 0;
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
                                             save_var);
        }
      else
        {
          offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
            FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
            CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
            alignment_pad->constant = offset_ptr->constant - save_constant;
        }
    }
}
static void
pad_below (offset_ptr, passed_mode, sizetree)
     struct args_size *offset_ptr;
     enum machine_mode passed_mode;
     tree sizetree;
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
/* Walk the tree of blocks describing the binding levels within a function
   and warn about uninitialized variables.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
uninitialized_vars_warning (block)
     tree block;
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (warn_uninitialized
          && TREE_CODE (decl) == VAR_DECL
          /* These warnings are unreliable for aggregates
             because assigning the fields one by one can fail to convince
             flow.c that the entire aggregate was initialized.
             Unions are troublesome because members may be shorter.  */
          && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
          && DECL_RTL (decl) != 0
          && GET_CODE (DECL_RTL (decl)) == REG
          /* Global optimizations can make it difficult to determine if a
             particular variable has been initialized.  However, a VAR_DECL
             with a nonzero DECL_INITIAL had an initializer, so do not
             claim it is potentially uninitialized.

             We do not care about the actual value in DECL_INITIAL, so we do
             not worry that it may be a dangling pointer.  */
          && DECL_INITIAL (decl) == NULL_TREE
          && regno_uninitialized (REGNO (DECL_RTL (decl))))
        warning_with_decl (decl,
                           "`%s' might be used uninitialized in this function");
      if (extra_warnings
          && TREE_CODE (decl) == VAR_DECL
          && DECL_RTL (decl) != 0
          && GET_CODE (DECL_RTL (decl)) == REG
          && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
        warning_with_decl (decl,
                           "variable `%s' might be clobbered by `longjmp' or `vfork'");
    }
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
/* Do the appropriate part of uninitialized_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning ()
{
  tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && GET_CODE (DECL_RTL (decl)) == REG
        && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning_with_decl (decl,
                         "argument `%s' might be clobbered by `longjmp' or `vfork'");
}
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */

void
setjmp_protect (block)
     tree block;
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
         || TREE_CODE (decl) == PARM_DECL)
        && DECL_RTL (decl) != 0
        && (GET_CODE (DECL_RTL (decl)) == REG
            || (GET_CODE (DECL_RTL (decl)) == MEM
                && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
        /* If this variable came from an inline function, it must be
           that its life doesn't overlap the setjmp.  If there was a
           setjmp in the function, it would already be in memory.  We
           must exclude such variables because their DECL_RTL might be
           set to strange things such as virtual_stack_vars_rtx.  */
        && ! DECL_FROM_INLINE (decl)
        && (
#ifdef NON_SAVING_SETJMP
            /* If longjmp doesn't restore the registers,
               don't put anything in them.  */
            NON_SAVING_SETJMP
            ||
#endif
            ! DECL_REGISTER (decl)))
      put_var_into_stack (decl, /*rescan=*/true);
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
/* Like the previous function, but for args instead of local variables.  */

void
setjmp_protect_args ()
{
  tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
         || TREE_CODE (decl) == PARM_DECL)
        && DECL_RTL (decl) != 0
        && (GET_CODE (DECL_RTL (decl)) == REG
            || (GET_CODE (DECL_RTL (decl)) == MEM
                && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
        && (
            /* If longjmp doesn't restore the registers,
               don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
            NON_SAVING_SETJMP
            ||
#endif
            ! DECL_REGISTER (decl)))
      put_var_into_stack (decl, /*rescan=*/true);
}
/* Return the context-pointer register corresponding to DECL,
   or 0 if it does not need one.  */

rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  if (context == 0
      || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  fp = find_function_data (context);

  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
    addr = XEXP (XEXP (addr, 0), 0);

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
         out of that function's stack frame.

         There are two cases:  If a separate ap is needed, allocate a
         slot in the outer function for it and dereference it that way.
         This is correct even if the real ap is actually a pseudo.
         Otherwise, just adjust the offset from the frame pointer to
         compensate.  */

#ifdef NEED_SEPARATE_AP
      addr = get_arg_pointer_save_area (fp);
      addr = fix_lexical_addr (XEXP (addr, 0), var);
      addr = memory_address (Pmode, addr);

      base = gen_rtx_MEM (Pmode, addr);
      set_mem_alias_set (base, get_frame_alias_set ());
      base = copy_to_reg (base);
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
         avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
        if (TREE_PURPOSE (link) == context)
          {
            base = RTL_EXPR_RTL (TREE_VALUE (link));
            break;
          }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
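
/* Illustrative sketch (not part of GCC): the decoding step above splits an
   address into a base register and a constant displacement, and the result
   is rebuilt as new_base + displacement.  A plain-C analogue with a
   hypothetical address representation:  */

struct demo_addr
{
  int base_reg;                 /* register number */
  long displacement;            /* constant offset, 0 for a bare register */
};

static struct demo_addr
demo_rebase (struct demo_addr addr, int new_base_reg, long extra_offset)
{
  struct demo_addr result;

  result.base_reg = new_base_reg;
  result.displacement = addr.displacement + extra_offset;
  return result;
}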
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
        adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));

  for (fp = outer_function_chain; fp; fp = fp->outer)
    for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
        {
          tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
                                    function);
          return adjust_trampoline_addr (tramp);
        }

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl
      && fn_context != inline_function_decl)
    fp = find_function_data (fn_context);

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
  tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
                                fp ? fp : cfun);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->x_trampoline_list = tree_cons (function, rtlexp,
                                         fp->x_trampoline_list);
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
         trampoline_list doesn't become garbage.  */
      rtlexp = make_node (RTL_EXPR);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return adjust_trampoline_addr (tramp);
}
/* Given a trampoline address,
   round it to multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
  rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
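
/* Illustrative sketch (not part of GCC): the two binops above implement the
   usual "add alignment-1, then mask" sequence at run time.  On a
   pointer-sized integer in plain C it looks like this; the names are
   hypothetical.  */

static unsigned long
demo_round_addr_up (unsigned long addr, unsigned long alignment)
{
  unsigned long temp = addr + alignment - 1;   /* the PLUS with the addend */
  return temp & ~(alignment - 1);              /* the AND with -alignment */
}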
/* Given a trampoline address, round it then apply any
   platform-specific adjustments so that the result can be used for a
   function call.  */

static rtx
adjust_trampoline_addr (tramp)
     rtx tramp;
{
  tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
  TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif
  return tramp;
}
/* Put all this function's BLOCK nodes including those that are chained
   onto the first block into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are BLOCK, the chain of top-level blocks of the function,
   and INSNS, the insn chain of the function.  */

void
identify_blocks ()
{
  int n_blocks;
  tree *block_vector, *last_block_vector;
  tree *block_stack;
  tree block = DECL_INITIAL (current_function_decl);

  if (block == 0)
    return;

  /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
     depth-first order.  */
  block_vector = get_block_vector (block, &n_blocks);
  block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));

  last_block_vector = identify_blocks_1 (get_insns (),
                                         block_vector + 1,
                                         block_vector + n_blocks,
                                         block_stack);

  /* If we didn't use all of the subblocks, we've misplaced block notes.  */
  /* ??? This appears to happen all the time.  Latent bugs elsewhere?  */
  if (0 && last_block_vector != block_vector + n_blocks)
    abort ();

  free (block_vector);
  free (block_stack);
}
/* Subroutine of identify_blocks.  Do the block substitution on the
   insn chain beginning with INSNS.  Recurse for CALL_PLACEHOLDER chains.

   BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
   BLOCK_VECTOR is incremented for each block seen.  */

static tree *
identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
     rtx insns;
     tree *block_vector;
     tree *end_block_vector;
     tree *orig_block_stack;
{
  rtx insn;
  tree *block_stack = orig_block_stack;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
        {
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
            {
              tree b;

              /* If there are more block notes than BLOCKs, something
                 is badly wrong.  */
              if (block_vector == end_block_vector)
                abort ();

              b = *block_vector++;
              NOTE_BLOCK (insn) = b;
              *block_stack++ = b;
            }
          else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
            {
              /* If there are more NOTE_INSN_BLOCK_ENDs than
                 NOTE_INSN_BLOCK_BEGs, something is badly wrong.  */
              if (block_stack == orig_block_stack)
                abort ();

              NOTE_BLOCK (insn) = *--block_stack;
            }
        }
      else if (GET_CODE (insn) == CALL_INSN
               && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          rtx cp = PATTERN (insn);

          block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
                                            end_block_vector, block_stack);
          if (XEXP (cp, 1))
            block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
                                              end_block_vector, block_stack);
          if (XEXP (cp, 2))
            block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
                                              end_block_vector, block_stack);
        }
    }

  /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
     something is badly wrong.  */
  if (block_stack != orig_block_stack)
    abort ();

  return block_vector;
}
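/* As an illustration, a nesting of

        NOTE_INSN_BLOCK_BEG    (outer)
          NOTE_INSN_BLOCK_BEG  (inner)
          NOTE_INSN_BLOCK_END  (inner)
        NOTE_INSN_BLOCK_END    (outer)

   consumes two entries from BLOCK_VECTOR in depth-first order; each begin
   note pushes its BLOCK onto BLOCK_STACK and the matching end note pops it,
   so both notes of a pair end up with the same NOTE_BLOCK.  */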
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);
  varray_type block_stack;

  if (block == NULL_TREE)
    return;

  VARRAY_TREE_INIT (block_stack, 10, "block_stack");

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  reorder_blocks_0 (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* Remove deleted blocks from the block fragment chains.  */
  reorder_fix_fragments (block);
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

static void
reorder_blocks_0 (block)
     tree block;
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (insns, current_block, p_block_stack)
     rtx insns;
     tree current_block;
     varray_type *p_block_stack;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
        {
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
            {
              tree block = NOTE_BLOCK (insn);

              /* If we have seen this block before, that means it now
                 spans multiple address regions.  Create a new fragment.  */
              if (TREE_ASM_WRITTEN (block))
                {
                  tree new_block = copy_node (block);
                  tree origin;

                  origin = (BLOCK_FRAGMENT_ORIGIN (block)
                            ? BLOCK_FRAGMENT_ORIGIN (block)
                            : block);
                  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
                  BLOCK_FRAGMENT_CHAIN (new_block)
                    = BLOCK_FRAGMENT_CHAIN (origin);
                  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

                  NOTE_BLOCK (insn) = new_block;
                  block = new_block;
                }

              BLOCK_SUBBLOCKS (block) = 0;
              TREE_ASM_WRITTEN (block) = 1;
              /* When there's only one block for the entire function,
                 current_block == block and we mustn't do this, it
                 will cause infinite recursion.  */
              if (block != current_block)
                {
                  BLOCK_SUPERCONTEXT (block) = current_block;
                  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
                  BLOCK_SUBBLOCKS (current_block) = block;
                  current_block = block;
                }
              VARRAY_PUSH_TREE (*p_block_stack, block);
            }
          else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
            {
              NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
              VARRAY_POP (*p_block_stack);
              BLOCK_SUBBLOCKS (current_block)
                = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
              current_block = BLOCK_SUPERCONTEXT (current_block);
            }
        }
      else if (GET_CODE (insn) == CALL_INSN
               && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          rtx cp = PATTERN (insn);
          reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
          if (XEXP (cp, 1))
            reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
          if (XEXP (cp, 2))
            reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
        }
    }
}
/* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
   appears in the block tree, select one of the fragments to become
   the new origin block.  */

static void
reorder_fix_fragments (block)
     tree block;
{
  while (block)
    {
      tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
      tree new_origin = NULL_TREE;

      if (dup_origin)
        {
          if (! TREE_ASM_WRITTEN (dup_origin))
            {
              new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);

              /* Find the first of the remaining fragments.  There must
                 be at least one -- the current block.  */
              while (! TREE_ASM_WRITTEN (new_origin))
                new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
              BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
            }
        }
      else if (! dup_origin)
        new_origin = block;

      /* Re-root the rest of the fragments to the new origin.  In the
         case that DUP_ORIGIN was null, that means BLOCK was the origin
         of a chain of fragments and we want to remove those fragments
         that didn't make it to the output.  */
      if (new_origin)
        {
          tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
          tree chain = BLOCK_FRAGMENT_CHAIN (new_origin);

          while (chain)
            {
              if (TREE_ASM_WRITTEN (chain))
                {
                  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
                  *pp = chain;
                  pp = &BLOCK_FRAGMENT_CHAIN (chain);
                }
              chain = BLOCK_FRAGMENT_CHAIN (chain);
            }
          *pp = NULL_TREE;
        }

      reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
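/* For instance (the names are purely illustrative; BLOCKs are anonymous),
   if a block B was split into fragments B, B1 and B2 and only B1 survived
   to the output, B1 becomes the new origin: its BLOCK_FRAGMENT_ORIGIN is
   cleared, dead fragments are unlinked from the BLOCK_FRAGMENT_CHAIN, and
   any other surviving fragments have their BLOCK_FRAGMENT_ORIGIN re-pointed
   at B1.  */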
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
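/* For example, a chain B1 -> B2 -> B3 becomes B3 -> B2 -> B1; the BLOCK
   nodes themselves are reused and only their BLOCK_CHAIN links are
   rewritten, so no allocation takes place.  */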
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
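/* Example: for a block tree rooted at A with subblocks B and C, where B in
   turn has a subblock D, all_blocks fills VECTOR with { A, B, D, C } (a
   depth-first preorder walk) and returns 4.  */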
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (block, n_blocks_p)
     tree block;
     int *n_blocks_p;
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (fn)
     tree fn;
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}
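/* Note that the loop starts at index 1 because block_vector[0] is the
   function's outermost BLOCK, which deliberately receives no number; with
   SDB or XCOFF debug info the numbering restarts at 1 in every function,
   otherwise next_block_index keeps a running count across functions.  */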
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (var, block)
     tree var;
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
/* Allocate a function structure and reset its contents to the defaults.  */

static void
prepare_function_start ()
{
  cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));

  init_stmt_for_function ();
  init_eh_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slots = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;
  nonlocal_goto_handler_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_varasm_status (cfun);

  /* Clear out data used for inlining.  */
  cfun->inlinable = 0;
  cfun->original_decl_initial = 0;
  cfun->original_arg_vector = 0;

  cfun->stack_alignment_needed = STACK_BOUNDARY;
  cfun->preferred_stack_boundary = STACK_BOUNDARY;

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_calls_eh_return = 0;
  current_function_calls_constant_p = 0;
  current_function_contains_functions = 0;
  current_function_is_leaf = 0;
  current_function_nothrow = 0;
  current_function_sp_is_unchanging = 0;
  current_function_uses_only_leaf_regs = 0;
  current_function_has_computed_jump = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not stdarg.  */
  current_function_stdarg = 0;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  current_function_funcdef_no = funcdef_no++;

  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

  cfun->max_jumptable_ents = 0;

  (*lang_hooks.function.init) (cfun);
  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */

void
init_dummy_function_start ()
{
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     const char *filename;
     int line;
{
  prepare_function_start ();

  current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file.  */

  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
}
/* Make sure all values used by the optimization passes have sane
   defaults.  */

void
init_function_for_compilation ()
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  */
  VARRAY_GROW (prologue, 0);
  VARRAY_GROW (epilogue, 0);
  VARRAY_GROW (sibcall_epilogue, 0);
}
/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
  if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      rtx tmp, seq;

      start_sequence ();
      /* Forcibly align the stack.  */
#ifdef STACK_GROWS_DOWNWARD
      tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
                                 stack_pointer_rtx, 1, OPTAB_WIDEN);
#else
      tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                                 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
      tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
                                 stack_pointer_rtx, 1, OPTAB_WIDEN);
#endif
      if (tmp != stack_pointer_rtx)
        emit_move_insn (stack_pointer_rtx, tmp);

      /* Enlist allocate_dynamic_stack_space to pick up the pieces.  */
      tmp = force_reg (Pmode, const0_rtx);
      allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
      seq = get_insns ();
      end_sequence ();

      for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
        if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
          break;
      if (tmp)
        emit_insn_before (seq, tmp);
      else
        emit_insn (seq);
    }
#endif

#ifndef HAS_INIT_SECTION
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
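/* As an arithmetic illustration of the alignment code above (the values are
   examples only): with PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT == 8,
   ALIGN is 16; on a downward-growing stack the single AND with -16 rounds the
   stack pointer down to a 16-byte boundary, while on an upward-growing stack
   0x1009 is rounded up via (0x1009 + 15) & -16 == 0x1010.  */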
/* The PENDING_SIZES represent the sizes of variable-sized types.
   Create RTL for the various sizes now (using temporary variables),
   so that we can refer to the sizes from the RTL we are generating
   for the current function.  The PENDING_SIZES are a TREE_LIST.  The
   TREE_VALUE of each node is a SAVE_EXPR.  */

static void
expand_pending_sizes (pending_sizes)
     tree pending_sizes;
{
  tree tem;

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
      /* Flush the queue in case this parameter declaration has
         side-effects.  */
      emit_queue ();
    }
}
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  tree tem;
  rtx last_ptr = NULL_RTX;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
         conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
          || GET_CODE (static_chain_incoming_rtx) == REG)
        emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();
  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        {
          rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
          set_mem_attributes (x, DECL_RESULT (subr), 1);
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
         into the true return register after the cleanups are done.  */

      /* In order to figure out what mode to use for the pseudo, we
         figure out what the mode of the eventual return register will
         actually be, and use that.  */
      rtx hard_reg
        = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
                               subr, 1);

      /* Structures that are returned in registers are not aggregate_value_p,
         so we may see a PARALLEL or a REG.  */
      if (REG_P (hard_reg))
        SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
      else if (GET_CODE (hard_reg) == PARALLEL)
        SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
      else
        abort ();

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
         result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL, NOTE_INSN_FUNCTION_BEG);

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* Copy the static chain pointer into a pseudo.  If we have
         small register classes, copy the value from memory if
         static_chain_incoming_rtx is a REG.  */
      if (tem)
        {
          /* If the static chain originally came in a register, put it back
             there, then move it out in the next insn.  The reason for
             this peculiar code is to satisfy function integration.  */
          if (SMALL_REGISTER_CLASSES
              && GET_CODE (static_chain_incoming_rtx) == REG)
            emit_move_insn (static_chain_incoming_rtx, last_ptr);
          last_ptr = copy_to_reg (static_chain_incoming_rtx);
        }

      while (tem)
        {
          tree rtlexp = make_node (RTL_EXPR);

          RTL_EXPR_RTL (rtlexp) = last_ptr;
          context_display = tree_cons (tem, rtlexp, context_display);
          tem = decl_function_context (tem);
          if (tem == 0)
            break;
          /* Chain thru stack frames, assuming pointer to next lexical frame
             is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
          last_ptr = plus_constant (last_ptr,
                                    -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
#endif
          last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
          set_mem_alias_set (last_ptr, get_frame_alias_set ());
          last_ptr = copy_to_reg (last_ptr);

          /* If we are not optimizing, ensure that we know that this
             piece of context is live over the entire function.  */
          if (! optimize)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
                                                save_expr_regs);
        }
    }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
                         2, fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }

  if (current_function_profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  expand_pending_sizes (nreverse (get_pending_sizes ()));

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end ()
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  cfun = 0;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (doit, arg)
     void (*doit) PARAMS ((rtx, void *));
     void *arg;
{
  rtx outgoing = current_function_return_rtx;

  if (! outgoing)
    return;

  if (GET_CODE (outgoing) == REG)
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}
static void
do_clobber_return_reg (reg, arg)
     rtx reg;
     void *arg ATTRIBUTE_UNUSED;
{
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}

void
clobber_return_register ()
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (reg, arg)
     rtx reg;
     void *arg ATTRIBUTE_UNUSED;
{
  emit_insn (gen_rtx_USE (VOIDmode, reg));
}

void
use_return_register ()
{
  diddle_return_value (do_use_return_reg, NULL);
}
static GTY(()) rtx initial_trampoline;

/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     const char *filename;
     int line;
     int end_bindings;
{
  tree link;
  rtx clobber_after;

  finish_expr_for_function ();

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
    get_arg_pointer_save_area (cfun);

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          initial_trampoline
            = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
          set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = replace_equiv_address (initial_trampoline, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insn_before (seq, tail_recursion_reentry);
    }
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == CALL_INSN)
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = get_insns ();
            end_sequence ();
            emit_insn_before (seq, tail_recursion_reentry);
            break;
          }
    }

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();
  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */
  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
                         2, fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }

  /* Let except.c know where it should emit the call to unregister
     the function context for sjlj exceptions.  */
  if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
    sjlj_emit_function_exit_after (get_last_insn ());

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl = current_function_return_rtx;

          /* This should be set in assign_parms.  */
          if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
            abort ();

          /* If this is a BLKmode structure being returned in registers,
             then use the mode computed in expand_return.  Note that if
             decl_rtl is memory, then its mode may have been changed,
             but that current_function_return_rtx has not.  */
          if (GET_MODE (real_decl_rtl) == BLKmode)
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             extension.  */
          if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));

#ifdef PROMOTE_FUNCTION_RETURN
              promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
                            &unsignedp, 1);
#endif

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
            {
              /* If expand_function_start has created a PARALLEL for decl_rtl,
                 move the result to the real return registers.  Otherwise, do
                 a group load from decl_rtl for a named return.  */
              if (GET_CODE (decl_rtl) == PARALLEL)
                emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
            }
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
        }
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address
        = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

#ifdef POINTERS_EXTEND_UNSIGNED
      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      if (GET_MODE (outgoing) != GET_MODE (value_address))
        value_address = convert_memory_address (GET_MODE (outgoing),
                                                value_address);
#endif

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
         of the result).  */
      current_function_return_rtx = outgoing;
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq, after;

    start_sequence ();
    clobber_return_register ();
    seq = get_insns ();
    end_sequence ();

    after = emit_insn_after (seq, clobber_after);

    if (clobber_after != after)
      cfun->x_clobber_return_insn = after;
  }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
rtx
get_arg_pointer_save_area (f)
     struct function *f;
{
  rtx ret = f->x_arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
      f->x_arg_pointer_save_area = ret;
    }

  if (f == cfun && ! f->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, get_insns ());
      pop_topmost_sequence ();
    }

  return ret;
}
/* Extend a vector that records the INSN_UIDs of INSNS
   (a list of one or more insns).  */

static void
record_insns (insns, vecp)
     rtx insns;
     varray_type *vecp;
{
  int i, len;
  rtx tmp;

  tmp = insns;
  len = 0;
  while (tmp != NULL_RTX)
    {
      len++;
      tmp = NEXT_INSN (tmp);
    }

  i = VARRAY_SIZE (*vecp);
  VARRAY_GROW (*vecp, i + len);
  tmp = insns;
  while (tmp != NULL_RTX)
    {
      VARRAY_INT (*vecp, i) = INSN_UID (tmp);
      i++;
      tmp = NEXT_INSN (tmp);
    }
}
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
   be running after reorg, SEQUENCE rtl is possible.  */

static int
contains (insn, vec)
     rtx insn;
     varray_type vec;
{
  int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
            count++;
      return count;
    }
  else
    {
      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
        if (INSN_UID (insn) == VARRAY_INT (vec, j))
          return 1;
    }
  return 0;
}
int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (contains (insn, prologue))
    return 1;
  if (contains (insn, epilogue))
    return 1;
  return 0;
}

int
sibcall_epilogue_contains (insn)
     rtx insn;
{
  if (sibcall_epilogue)
    return contains (insn, sibcall_epilogue);
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (bb, line_note)
     basic_block bb;
     rtx line_note;
{
  emit_jump_insn_after (gen_return (), bb->end);
  if (line_note)
    emit_line_note_after (NOTE_SOURCE_FILE (line_note),
                          NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
}
#endif /* HAVE_return */
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)

/* These functions convert the epilogue into a variant that does not modify the
   stack pointer.  This is used in cases where a function returns an object
   whose size is not known until it is computed.  The called function leaves the
   object on the stack, leaves the stack depressed, and returns a pointer to
   the object.

   What we need to do is track all modifications and references to the stack
   pointer, deleting the modifications and changing the references to point to
   the location the stack pointer would have pointed to had the modifications
   not occurred.

   These functions need to be portable so we need to make as few assumptions
   about the epilogue as we can.  However, the epilogue basically contains
   three things: instructions to reset the stack pointer, instructions to
   reload registers, possibly including the frame pointer, and an
   instruction to return to the caller.

   If we can't be sure of what a relevant epilogue insn is doing, we abort.
   We also make no attempt to validate the insns we make since if they are
   invalid, we probably can't do anything valid.  The intent is that these
   routines get "smarter" as more and more machines start to use them and
   they try operating on different epilogues.

   We use the following structure to track what the part of the epilogue that
   we've already processed has done.  We keep two copies of the SP equivalence,
   one for use during the insn we are processing and one for use in the next
   insn.  The difference is because one part of a PARALLEL may adjust SP
   and the other may use it.  */

struct epi_info
{
  rtx sp_equiv_reg;             /* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;      /* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;         /* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;  /* Offset to be used at end of insn.  */
  rtx equiv_reg_src;            /* If nonzero, the value that SP_EQUIV_REG
                                   should be set to once we no longer need
                                   its value.  */
};

static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
static void emit_equiv_load PARAMS ((struct epi_info *));
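/* As a sketch of how this tracking is used (register names and offsets are
   purely illustrative): given an epilogue of the form

        (set (reg sp) (plus (reg fp) (const_int 16)))
        (set (reg r1) (mem (plus (reg sp) (const_int 4))))
        (return)

   processing the first SET merely records SP_EQUIV_REG == fp and
   SP_OFFSET == 16 instead of emitting it; the second insn is then emitted
   with its SP reference rewritten in terms of fp (effectively fp + 20), so
   the stack pointer itself is never modified.  */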
/* Modify INSN, a list of one or more insns that is part of the epilogue, to
   make no modifications to the stack pointer.  Return the new list of insns.  */

static rtx
keep_stack_depressed (insns)
     rtx insns;
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */
  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;

  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  insn = insns;
  next = NULL_RTX;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
        {
          add_insn (insn);
          insn = next;
          continue;
        }

      /* If this insn references the register that SP is equivalent to and
         we have a pending load to that register, we must force out the load
         first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
          && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
        {
          emit_equiv_load (&info);
          info.sp_equiv_reg = 0;
        }

      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;
      /* If this is a (RETURN) and the return address is on the stack,
         update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
          || (GET_CODE (PATTERN (insn)) == PARALLEL
              && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
        {
          rtx retaddr = INCOMING_RETURN_ADDR_RTX;
          rtx base = 0;
          HOST_WIDE_INT offset = 0;
          rtx jump_insn, jump_set;

          /* If the return address is in a register, we can emit the insn
             unchanged.  Otherwise, it must be a MEM and we see what the
             base register and offset are.  In any case, we have to emit any
             pending load to the equivalent reg of SP, if any.  */
          if (GET_CODE (retaddr) == REG)
            {
              emit_equiv_load (&info);
              add_insn (insn);
              insn = next;
              continue;
            }
          else if (GET_CODE (retaddr) == MEM
                   && GET_CODE (XEXP (retaddr, 0)) == REG)
            base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
          else if (GET_CODE (retaddr) == MEM
                   && GET_CODE (XEXP (retaddr, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
                   && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
            {
              base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
              offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
            }
          else
            abort ();

          /* If the base of the location containing the return pointer
             is SP, we must update it with the replacement address.  Otherwise,
             just build the necessary MEM.  */
          retaddr = plus_constant (base, offset);
          if (base == stack_pointer_rtx)
            retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
                                            plus_constant (info.sp_equiv_reg,
                                                           info.sp_offset));

          retaddr = gen_rtx_MEM (Pmode, retaddr);

          /* If there is a pending load to the equivalent register for SP
             and we reference that register, we must load our address into
             a scratch register and then do that load.  */
          if (info.equiv_reg_src
              && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
            {
              unsigned int regno;
              rtx reg;

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (HARD_REGNO_MODE_OK (regno, Pmode)
                    && !fixed_regs[regno]
                    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
                    && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
                                         regno)
                    && !refers_to_regno_p (regno,
                                           regno + HARD_REGNO_NREGS (regno,
                                                                     Pmode),
                                           info.equiv_reg_src, NULL))
                  break;

              if (regno == FIRST_PSEUDO_REGISTER)
                abort ();

              reg = gen_rtx_REG (Pmode, regno);
              emit_move_insn (reg, retaddr);
              retaddr = reg;
            }

          emit_equiv_load (&info);
          jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

          /* Show the SET in the above insn is a RETURN.  */
          jump_set = single_set (jump_insn);
          if (jump_set == 0)
            abort ();
          else
            SET_IS_RETURN_P (jump_set) = 1;
        }

      /* If SP is not mentioned in the pattern and its equivalent register, if
         any, is not modified, just emit it.  Otherwise, if neither is set,
         replace the reference to SP and emit the insn.  If none of those are
         true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
               && (info.sp_equiv_reg == stack_pointer_rtx
                   || !reg_set_p (info.sp_equiv_reg, insn)))
        add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
               && (info.sp_equiv_reg == stack_pointer_rtx
                   || !reg_set_p (info.sp_equiv_reg, insn)))
        {
          if (! validate_replace_rtx (stack_pointer_rtx,
                                      plus_constant (info.sp_equiv_reg,
                                                     info.sp_offset),
                                      insn))
            abort ();

          add_insn (insn);
        }
      else if (GET_CODE (PATTERN (insn)) == SET)
        handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        {
          for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
            if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
              handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
        }
      else
        add_insn (insn);

      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (set, p)
     rtx set;
     struct epi_info *p;
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
        abort ();

      if (GET_CODE (SET_SRC (set)) == PLUS
          && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
        {
          p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
          p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
        }
      else
        p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
        {
          p->new_sp_equiv_reg = p->sp_equiv_reg;
          p->new_sp_offset += p->sp_offset;
        }

      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
        abort ();
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
          || p->equiv_reg_src != 0)
        abort ();
      else
        p->equiv_reg_src
          = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                  plus_constant (p->sp_equiv_reg,
                                                 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn to its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
                                            plus_constant (p->sp_equiv_reg,
                                                           p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
                                             plus_constant (p->sp_equiv_reg,
                                                            p->sp_offset));
      emit_insn (set);
    }
}

/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */

static void
emit_equiv_load (p)
     struct epi_info *p;
{
  if (p->equiv_reg_src != 0)
    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);

  p->equiv_reg_src = 0;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
  edge e;
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
  rtx seq;
#endif
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif
#if defined (HAVE_epilogue) || defined(HAVE_return)
  rtx epilogue_end = NULL_RTX;
#endif

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, &prologue);
      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);

      seq = get_insns ();
      end_sequence ();

      /* Can't deal with multiple successors of the entry block
         at the moment.  Function should always have at least one
         entry point.  */
      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
        abort ();

      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
      inserted = 1;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;
#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
         then by definition we don't need a full epilogue.  Examine
         the block that falls through to EXIT.  If it does not
         contain any code, examine its predecessors and try to
         emit (conditional) return instructions.  */

      basic_block last;
      edge e_next;
      rtx label;

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = last->end;
      while (label && GET_CODE (label) != CODE_LABEL)
        {
          if (active_insn_p (label))
            break;
          label = PREV_INSN (label);
        }

      if (last->head == label && GET_CODE (label) == CODE_LABEL)
        {
          rtx epilogue_line_note = NULL_RTX;

          /* Locate the line number associated with the closing brace,
             if we can find one.  */
          for (seq = get_last_insn ();
               seq && ! active_insn_p (seq);
               seq = PREV_INSN (seq))
            if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
              {
                epilogue_line_note = seq;
                break;
              }

          for (e = last->pred; e; e = e_next)
            {
              basic_block bb = e->src;
              rtx jump;

              e_next = e->pred_next;
              if (bb == ENTRY_BLOCK_PTR)
                continue;

              jump = bb->end;
              if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
                continue;

              /* If we have an unconditional jump, we can replace that
                 with a simple return instruction.  */
              if (simplejump_p (jump))
                {
                  emit_return_into_block (bb, epilogue_line_note);
                  delete_insn (jump);
                }

              /* If we have a conditional jump, we can try to replace
                 that with a conditional return instruction.  */
              else if (condjump_p (jump))
                {
                  if (! redirect_jump (jump, 0, 0))
                    continue;

                  /* If this block has only one successor, it both jumps
                     and falls through to the fallthru block, so we can't
                     delete the edge.  */
                  if (bb->succ->succ_next == NULL)
                    continue;
                }
              else
                continue;

              /* Fix up the CFG for the successful change we just made.  */
              redirect_edge_succ (e, EXIT_BLOCK_PTR);
            }

          /* Emit a return insn for the exit fallthru block.  Whether
             this is still reachable will be determined later.  */

          emit_barrier_after (last->end);
          emit_return_into_block (last, epilogue_line_note);
          epilogue_end = last->end;
          last->succ->flags &= ~EDGE_FALLTHRU;
          goto epilogue_done;
        }
    }
#endif
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      /* Find the edge that falls through to EXIT.  Other edges may exist
         due to RETURN instructions, but those don't need epilogues.
         There really shouldn't be a mixture -- either all should have
         been converted or none, however...  */

      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (e == NULL)
        goto epilogue_done;

      start_sequence ();
      epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
         it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
          && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
        seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
#endif
epilogue_done:

  if (inserted)
    commit_edge_insertions ();
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      basic_block bb = e->src;
      rtx insn = bb->end;
      rtx i;
      rtx newinsn;

      if (GET_CODE (insn) != CALL_INSN
          || ! SIBLING_CALL_P (insn))
        continue;

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
         avoid getting rid of sibcall epilogue insns.  Do this before we
         actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);

      i = PREV_INSN (insn);
      newinsn = emit_insn_before (seq, insn);
    }
#endif
#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
         line note after the prologue.  Which means (1) that if
         there are line number notes before where we inserted the
         prologue we should move them, and (2) we should generate a
         note before the end of the first basic block, if there isn't
         one already there.

         ??? This behavior is completely broken when dealing with
         multiple entry functions.  We simply place the note always
         into first basic block and let alternate entry points
         to be missed.  */

      for (insn = prologue_end; insn; insn = prev)
        {
          prev = PREV_INSN (insn);
          if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
            {
              /* Note that we cannot reorder the first insn in the
                 chain, since rest_of_compilation relies on that
                 remaining constant.  */
              if (prev == NULL)
                break;
              reorder_insns (insn, insn, prologue_end);
            }
        }

      /* Find the last line number note in the first block.  */
      for (insn = ENTRY_BLOCK_PTR->next_bb->end;
           insn != prologue_end && insn;
           insn = PREV_INSN (insn))
        if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
          break;

      /* If we didn't find one, make a copy of the first line number
         we run across.  */
      if (! insn)
        {
          for (insn = next_active_insn (prologue_end);
               insn;
               insn = PREV_INSN (insn))
            if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
              {
                emit_line_note_after (NOTE_SOURCE_FILE (insn),
                                      NOTE_LINE_NUMBER (insn),
                                      prologue_end);
                break;
              }
        }
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VARRAY_SIZE (prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == NOTE)
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the prologue-end note if we haven't already, and
             move it to just after the last prologue insn.  */
          if (note == 0)
            {
              for (note = last; (note = NEXT_INSN (note));)
                if (GET_CODE (note) == NOTE
                    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (GET_CODE (last) == CODE_LABEL)
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if ((len = VARRAY_SIZE (epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
         We apparently can't depend on basic_block_{head,end} after
         reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (GET_CODE (insn) == NOTE)
            {
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                note = insn;
            }
          else if (contains (insn, epilogue))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          /* Find the epilogue-begin note if we haven't already, and
             move it to just before the first epilogue insn.  */
          if (note == 0)
            {
              for (note = insn; (note = PREV_INSN (note));)
                if (GET_CODE (note) == NOTE
                    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                  break;
            }

          if (PREV_INSN (last) != note)
            reorder_insns (note, note, PREV_INSN (last));
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  VARRAY_INT_INIT (prologue, 0, "prologue");
  VARRAY_INT_INIT (epilogue, 0, "epilogue");
  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}

#include "gt-function.h"