/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
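
/* A minimal sketch of the calling sequence just described (illustrative
   only; the exact argument lists here are assumptions, not taken from
   this file):

	expand_function_start (fndecl, 0);
	... expand each statement of the function body into RTL ...
	expand_function_end (input_filename, lineno, 0);

   If a variable that was given a pseudo-register later turns out to need
   a memory address, the front end calls put_var_into_stack (decl), which
   retrofits a stack slot and rewrites the RTL emitted so far.  */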
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
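
/* A worked example of the two macros (illustrative values): with
   ALIGN == 8, FLOOR_ROUND (-13, 8) is (-13) & ~7 == -16, and
   CEIL_ROUND (13, 8) is (13 + 7) & ~7 == 16.  Signed division would
   have rounded -13/8 toward zero and produced -8 rather than -16,
   which is why the bit-masking form is used.  */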
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PROTO((struct function *));
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));
void (*mark_machine_status) PROTO((struct function *));
void (*free_machine_status) PROTO((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PROTO((struct function *));
void (*save_lang_status) PROTO((struct function *));
void (*restore_lang_status) PROTO((struct function *));
void (*mark_lang_status) PROTO((struct function *));
void (*free_lang_status) PROTO((struct function *));
/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *current_function = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
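
/* A sketch of the nesting-level discipline described above (illustrative
   usage only, not code from this file):

	push_temp_slots ();
	temp = assign_stack_temp (SImode, 4, 0);
	... emit code that uses TEMP ...
	preserve_temp_slots (result);	(keep RESULT's slot alive one level up)
	pop_temp_slots ();		(remaining slots become reusable)

   A slot that is neither kept nor preserved is marked not in use when its
   level is popped, so a later assign_stack_temp may hand it out again.  */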
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
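
/* For example (illustrative), in an insn such as

	(set (mem:SI addr) (plus:SI (mem:SI addr) (const_int 1)))

   where the MEM refers to VAR, both occurrences must be rewritten to the
   same replacement pseudo, or a constraint that requires the two operands
   to match would no longer be satisfied.  */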
struct insns_for_mem_entry
{
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
					int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode,
					      HOST_WIDE_INT, int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack	PROTO((struct function *, rtx, tree,
				       enum machine_mode, enum machine_mode,
				       int, int, int,
				       struct hash_table *));
static void fixup_var_refs	PROTO((rtx, enum machine_mode, int,
				       struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement	PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int, struct hash_table *));
static void fixup_var_refs_1	PROTO((rtx, enum machine_mode, rtx *, rtx,
				       struct fixup_replacement **));
static rtx fixup_memory_subreg	PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1	PROTO((rtx, rtx));
static void optimize_bit_field	PROTO((rtx, rtx, rtx *));
static void instantiate_decls	PROTO((tree, int));
static void instantiate_decls_1	PROTO((tree, int));
static void instantiate_decl	PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers	PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int,
					struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below		PROTO((struct args_size *, enum machine_mode,
				       tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down		PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse	PROTO((tree));
static int all_blocks		PROTO((tree, tree *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static int *record_insns	PROTO((rtx)) ATTRIBUTE_UNUSED;
static int contains		PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static boolean purge_addressof_1 PROTO((rtx *, rtx, int, int,
					struct hash_table *));
static int is_addressof	PROTO ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
						       struct hash_table *,
						       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk	PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
static void mark_temp_slot	PROTO ((struct temp_slot *));
static void mark_function_status PROTO ((struct function *));
static void mark_function_chain PROTO ((void *));
static void prepare_function_start PROTO ((void));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
		      ? current_function
		      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (current_function == 0)
    init_dummy_function_start ();
  p = current_function;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  current_function = 0;
}
void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  current_function = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
		      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (current_function);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  if (function != current_function)
    push_obstacks (function->function_obstack,
		   function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == current_function && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (function != current_function)
    pop_obstacks ();

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, current_function);
}
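
/* Example: a caller wanting an 8-byte slot aligned to a 64-bit boundary
   could write (illustrative)

	rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 64);

   while ALIGN == 0 would align according to DImode and ALIGN == -1 would
   use BIGGEST_ALIGNMENT and round the size up to a multiple of it, as
   documented above assign_stack_local_1.  */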
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (!flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
	  /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
	  && !flag_strict_aliasing)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;

  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   temporaries.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
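
/* Example (illustrative): a front end that needs an addressable
   temporary for TYPE can write

	rtx t = assign_temp (type, 1, 1, 0);

   The MEMORY_REQUIRED argument of 1 forces a stack slot even for
   register-sized types; with 0, a scalar would instead get a fresh
   (possibly promoted) pseudo from gen_reg_rtx.  */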
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;

	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
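
/* A worked example (illustrative offsets): two free BLKmode slots whose
   BASE_OFFSET/FULL_SIZE are 0/16 and 16/8 satisfy
   p->base_offset + p->full_size == q->base_offset, so they merge into a
   single 24-byte slot; a later 24-byte request can then reuse already
   allocated frame space instead of growing the frame.  */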
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if both OLD and NEW are a PLUS and if
     there is a register in common between them.  If so, try a recursive
     call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS || GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     was taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
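
/* Typical save/restore pattern for these accessors (illustrative):

	int saved_level = get_target_temp_slot_level ();
	... emit code that pushes and pops its own temp levels ...
	set_target_temp_slot_level (saved_level);
 */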
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			    0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), Pmode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : current_function;
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];
  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
				0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			0, 0);
  end_sequence ();
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Remember the notes in case we delete the insn.  */
	  note = REG_NOTES (insn);

	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     if it is not can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  while (note)
	    {
	      if (GET_CODE (note) != INSN_LIST)
		XEXP (note, 0)
		  = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	      note = XEXP (note, 1);
	    }
	}

      if (! ht)
	insn = next;
      else if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = NULL_RTX;
    }
}
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */
1755 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1757 enum machine_mode promoted_mode
;
1760 struct fixup_replacement
**replacements
;
1763 register rtx x
= *loc
;
1764 RTX_CODE code
= GET_CODE (x
);
1765 register const char *fmt
;
1766 register rtx tem
, tem1
;
1767 struct fixup_replacement
*replacement
;
1772 if (XEXP (x
, 0) == var
)
1774 /* Prevent sharing of rtl that might lose. */
1775 rtx sub
= copy_rtx (XEXP (var
, 0));
1777 if (! validate_change (insn
, loc
, sub
, 0))
1779 rtx y
= gen_reg_rtx (GET_MODE (sub
));
1782 /* We should be able to replace with a register or all is lost.
1783 Note that we can't use validate_change to verify this, since
1784 we're not caring for replacing all dups simultaneously. */
1785 if (! validate_replace_rtx (*loc
, y
, insn
))
1788 /* Careful! First try to recognize a direct move of the
1789 value, mimicking how things are done in gen_reload wrt
1790 PLUS. Consider what happens when insn is a conditional
1791 move instruction and addsi3 clobbers flags. */
1794 new_insn
= emit_insn (gen_rtx_SET (VOIDmode
, y
, sub
));
1795 seq
= gen_sequence ();
1798 if (recog_memoized (new_insn
) < 0)
1800 /* That failed. Fall back on force_operand and hope. */
1803 force_operand (sub
, y
);
1804 seq
= gen_sequence ();
1809 /* Don't separate setter from user. */
1810 if (PREV_INSN (insn
) && sets_cc0_p (PREV_INSN (insn
)))
1811 insn
= PREV_INSN (insn
);
1814 emit_insn_before (seq
, insn
);
1822 /* If we already have a replacement, use it. Otherwise,
1823 try to fix up this address in case it is invalid. */
1825 replacement
= find_fixup_replacement (replacements
, var
);
1826 if (replacement
->new)
1828 *loc
= replacement
->new;
1832 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1834 /* Unless we are forcing memory to register or we changed the mode,
1835 we can leave things the way they are if the insn is valid. */
1837 INSN_CODE (insn
) = -1;
1838 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1839 && recog_memoized (insn
) >= 0)
1842 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1846 /* If X contains VAR, we need to unshare it here so that we update
1847 each occurrence separately. But all identical MEMs in one insn
1848 must be replaced with the same rtx because of the possibility of
1851 if (reg_mentioned_p (var
, x
))
1853 replacement
= find_fixup_replacement (replacements
, x
);
1854 if (replacement
->new == 0)
1855 replacement
->new = copy_most_rtx (x
, var
);
1857 *loc
= x
= replacement
->new;
1873 /* Note that in some cases those types of expressions are altered
1874 by optimize_bit_field, and do not survive to get here. */
1875 if (XEXP (x
, 0) == var
1876 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1877 && SUBREG_REG (XEXP (x
, 0)) == var
))
1879 /* Get TEM as a valid MEM in the mode presently in the insn.
1881 We don't worry about the possibility of MATCH_DUP here; it
1882 is highly unlikely and would be tricky to handle. */
1885 if (GET_CODE (tem
) == SUBREG
)
1887 if (GET_MODE_BITSIZE (GET_MODE (tem
))
1888 > GET_MODE_BITSIZE (GET_MODE (var
)))
1890 replacement
= find_fixup_replacement (replacements
, var
);
1891 if (replacement
->new == 0)
1892 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1893 SUBREG_REG (tem
) = replacement
->new;
1896 tem
= fixup_memory_subreg (tem
, insn
, 0);
1899 tem
= fixup_stack_1 (tem
, insn
);
1901 /* Unless we want to load from memory, get TEM into the proper mode
1902 for an extract from memory. This can only be done if the
1903 extract is at a constant position and length. */
1905 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1906 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1907 && ! mode_dependent_address_p (XEXP (tem
, 0))
1908 && ! MEM_VOLATILE_P (tem
))
1910 enum machine_mode wanted_mode
= VOIDmode
;
1911 enum machine_mode is_mode
= GET_MODE (tem
);
1912 HOST_WIDE_INT pos
= INTVAL (XEXP (x
, 2));
1915 if (GET_CODE (x
) == ZERO_EXTRACT
)
1918 = insn_data
[(int) CODE_FOR_extzv
].operand
[1].mode
;
1919 if (wanted_mode
== VOIDmode
)
1920 wanted_mode
= word_mode
;
1924 if (GET_CODE (x
) == SIGN_EXTRACT
)
1926 wanted_mode
= insn_data
[(int) CODE_FOR_extv
].operand
[1].mode
;
1927 if (wanted_mode
== VOIDmode
)
1928 wanted_mode
= word_mode
;
1931 /* If we have a narrower mode, we can do something. */
1932 if (wanted_mode
!= VOIDmode
1933 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1935 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
1936 rtx old_pos
= XEXP (x
, 2);
1939 /* If the bytes and bits are counted differently, we
1940 must adjust the offset. */
1941 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1942 offset
= (GET_MODE_SIZE (is_mode
)
1943 - GET_MODE_SIZE (wanted_mode
) - offset
);
1945 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1947 newmem
= gen_rtx_MEM (wanted_mode
,
1948 plus_constant (XEXP (tem
, 0), offset
));
1949 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1950 MEM_COPY_ATTRIBUTES (newmem
, tem
);
1952 /* Make the change and see if the insn remains valid. */
1953 INSN_CODE (insn
) = -1;
1954 XEXP (x
, 0) = newmem
;
1955 XEXP (x
, 2) = GEN_INT (pos
);
1957 if (recog_memoized (insn
) >= 0)
1960 /* Otherwise, restore old position. XEXP (x, 0) will be
1962 XEXP (x
, 2) = old_pos
;
1966 /* If we get here, the bitfield extract insn can't accept a memory
1967 reference. Copy the input into a register. */
1969 tem1
= gen_reg_rtx (GET_MODE (tem
));
1970 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;
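
      /* Illustrative note (not part of the original sources): a promoted
	 HImode variable typically lives in an SImode pseudo, so a reference
	 appears as (subreg:HI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set.
	 Once VAR has been forced into a stack slot the SUBREG wraps a MEM
	 instead, which the code above unwinds by substituting VAR itself
	 and re-fixing it in its current mode.  */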
    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}
      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
#ifdef HAVE_insv
	rtx outerdest = dest;
#endif

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 0);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode;
		enum machine_mode is_mode = GET_MODE (tem);
		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));

		wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
		if (wanted_mode == VOIDmode)
		  wanted_mode = word_mode;

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		      offset = (GET_MODE_SIZE (is_mode)
				- GET_MODE_SIZE (wanted_mode) - offset);

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx_MEM (wanted_mode,
					  plus_constant (XEXP (tem, 0),
							 offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_COPY_ATTRIBUTES (newmem, tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif
	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      SET_SRC (x) = replacement->new;
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }
	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }
	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      {
		fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
		promoted_mode = GET_MODE (fixeddest);
	      }
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (promoted_mode);

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }

    default:
      break;
    }
  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside REG_NOTES.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

  if (BYTES_BIG_ENDIAN)
    offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
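
/* Illustrative note (not part of the original sources): on a 32-bit
   target this turns

       (subreg:SI (mem:DI ADDR) 1)

   into (mem:SI (plus ADDR (const_int 4))), since SUBREG_WORD 1 names
   the second word of the DImode memory reference.  */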
/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL is as in fixup_memory_subreg.  */

static rtx
walk_fixup_memory_subreg (x, insn, uncritical)
     register rtx x;
     rtx insn;
     int uncritical;
{
  register enum rtx_code code;
  register const char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, uncritical);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j)
	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
	}
    }
  return x;
}
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register const char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
#endif
	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
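
/* Illustrative note (not part of the original sources): if a target only
   accepts, say, 12-bit signed displacements, a slot reference such as
   (mem:SI (plus (reg fp) (const_int 5000))) fails memory_address_p; the
   code above then emits "r = fp + 5000" before INSN and rewrites the
   reference as (mem:SI (reg r)).  The 12-bit figure is only an example;
   the real limit is whatever the target's address validation accepts.  */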
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
	  rtx insns;

	  /* Adjust OFFSET to count bits from low-address byte.  */
	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		      - offset - INTVAL (XEXP (bitfield, 1)));

	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		offset -= (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			   - MIN (UNITS_PER_WORD,
				  GET_MODE_SIZE (GET_MODE (memref))));
	    }

	  start_sequence ();
	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));
	  insns = get_insns ();
	  end_sequence ();
	  emit_insns_before (insns, insn);

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0
		     && (GET_MODE_CLASS (GET_MODE (dest))
			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
		     && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
			 <= UNITS_PER_WORD))
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
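
/* Illustrative note (not part of the original sources): given a
   byte-aligned field, this optimization rewrites something like

       (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
	    (reg:SI R))

   as a plain byte store, (set (mem:QI ADDR') (lowpart of R)), provided
   the rewritten insn is still recognized by the target.  */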
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif

/* On a few machines, the CFA coincides with the arg pointer.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET 0
#endif
/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
   its address taken.  DECL is the decl for the object stored in the
   register, for later use if we do need to force REG into the stack.
   REG is overwritten by the MEM like in put_reg_into_stack.  */

rtx
gen_mem_addressof (reg, decl)
     rtx reg;
     tree decl;
{
  tree type = TREE_TYPE (decl);
  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
			     REGNO (reg), decl);

  /* If the original REG was a user-variable, then so is the REG whose
     address is being taken.  Likewise for unchanging.  */
  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
  RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, DECL_MODE (decl));
  XEXP (reg, 0) = r;
  MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
  MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
  MEM_ALIAS_SET (reg) = get_alias_set (decl);

  if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
    fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);

  return reg;
}
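
/* Illustrative note (not part of the original sources): after
   gen_mem_addressof, a pseudo whose address was taken is represented as

       (mem:M (addressof:P (reg:M N) REGNO decl))

   The ADDRESSOF is a placeholder address: if the register can stay in a
   register after all, purge_addressof later deletes the MEM wrapper;
   otherwise put_addressof_into_stack gives it a real stack address.  */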
/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack.  */

void
flush_addressof (decl)
     tree decl;
{
  if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
      && DECL_RTL (decl) != 0
      && GET_CODE (DECL_RTL (decl)) == MEM
      && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
      && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
    put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
}
/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.  */

static void
put_addressof_into_stack (r, ht)
     rtx r;
     struct hash_table *ht;
{
  tree decl = ADDRESSOF_DECL (r);
  rtx reg = XEXP (r, 0);

  if (GET_CODE (reg) != REG)
    abort ();

  put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
		      DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
		      ADDRESSOF_REGNO (r),
		      TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
}
/* List of replacements made below in purge_addressof_1 when creating
   bitfield insertions.  */
static rtx purge_bitfield_addressof_replacements;

/* List of replacements made below in purge_addressof_1 for patterns
   (MEM (ADDRESSOF (REG ...))).  The key of the list entry is the
   corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
   the whole pattern.  List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
   enough in complex cases, e.g. when some field values can be
   extracted by using a MEM with a narrower mode.  */
static rtx purge_addressof_replacements;
/* Helper function for purge_addressof.  See if the rtx expression at *LOC
   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
   the stack.  If the function returns FALSE then the replacement could not
   be made.  */

static boolean
purge_addressof_1 (loc, insn, force, store, ht)
     rtx *loc;
     rtx insn;
     int force, store;
     struct hash_table *ht;
{
  rtx x;
  RTX_CODE code;
  int i, j;
  const char *fmt;
  boolean result = true;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return true;

  code = GET_CODE (x);

  /* If we don't return in any of the cases below, we will recurse inside
     the RTX, which will normally result in any ADDRESSOF being forced into
     memory.  */

  if (code == SET)
    {
      result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
      result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
      return result;
    }

  else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
    {
      /* We must create a copy of the rtx because it was created by
	 overwriting a REG rtx which is always shared.  */
      rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
      rtx insns;

      if (validate_change (insn, loc, sub, 0)
	  || validate_replace_rtx (x, sub, insn))
	return true;

      start_sequence ();
      sub = force_operand (sub, NULL_RTX);
      if (! validate_change (insn, loc, sub, 0)
	  && ! validate_replace_rtx (x, sub, insn))
	abort ();

      insns = gen_sequence ();
      end_sequence ();
      emit_insn_before (insns, insn);
      return true;
    }

  else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
    {
      rtx sub = XEXP (XEXP (x, 0), 0);
      rtx sub2;

      if (GET_CODE (sub) == MEM)
	{
	  sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
	  MEM_COPY_ATTRIBUTES (sub2, sub);
	  RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
	  sub = sub2;
	}
      else if (GET_CODE (sub) == REG
	       && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
	;
      else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
	{
	  int size_x, size_sub;

	  if (!insn)
	    {
	      /* When processing REG_NOTES look at the list of
		 replacements done on the insn to find the register that X
		 was replaced by.  */
	      rtx tem;

	      for (tem = purge_bitfield_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (x, XEXP (tem, 0)))
		  {
		    *loc = XEXP (XEXP (tem, 1), 0);
		    return true;
		  }

	      /* See comment for purge_addressof_replacements.  */
	      for (tem = purge_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
		  {
		    rtx z = XEXP (XEXP (tem, 1), 0);

		    if (GET_MODE (x) == GET_MODE (z)
			|| (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
			    && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
		      abort ();

		    /* It can happen that the note may speak of things
		       in a wider (or just different) mode than the
		       code did.  This is especially true of
		       REG_RETVAL.  */

		    if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
		      z = SUBREG_REG (z);

		    if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
			&& (GET_MODE_SIZE (GET_MODE (x))
			    > GET_MODE_SIZE (GET_MODE (z))))
		      {
			/* This can occur as a result of invalid
			   pointer casts, e.g. float f; ...
			   *(long long int *)&f.
			   ??? We could emit a warning here, but
			   without a line number that wouldn't be
			   useful.  */
			z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
		      }
		    else
		      z = gen_lowpart (GET_MODE (x), z);

		    *loc = z;
		    return true;
		  }

	      /* Sometimes we may not be able to find the replacement.  For
		 example when the original insn was a MEM in a wider mode,
		 and the note is part of a sign extension of a narrowed
		 version of that MEM.  Gcc testcase compile/990829-1.c can
		 generate an example of this situation.  Rather than complain
		 we return false, which will prompt our caller to remove the
		 offending note.  */
	      return false;
	    }

	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));

	  /* Don't even consider working with paradoxical subregs,
	     or the moral equivalent seen here.  */
	  if (size_x <= size_sub
	      && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
	    {
	      /* Do a bitfield insertion to mirror what would happen
		 in memory.  */

	      rtx val, seq;

	      if (store)
		{
		  rtx p = PREV_INSN (insn);

		  start_sequence ();
		  val = gen_reg_rtx (GET_MODE (x));
		  if (! validate_change (insn, loc, val, 0))
		    {
		      /* Discard the current sequence and put the
			 ADDRESSOF on stack.  */
		      end_sequence ();
		      goto give_up;
		    }
		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_before (seq, insn);
		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
					 insn, ht);

		  start_sequence ();
		  store_bit_field (sub, size_x, 0, GET_MODE (x),
				   val, GET_MODE_SIZE (GET_MODE (sub)),
				   GET_MODE_SIZE (GET_MODE (sub)));

		  /* Make sure to unshare any shared rtl that store_bit_field
		     might have created.  */
		  for (p = get_insns (); p; p = NEXT_INSN (p))
		    {
		      reset_used_flags (PATTERN (p));
		      reset_used_flags (REG_NOTES (p));
		      reset_used_flags (LOG_LINKS (p));
		    }
		  unshare_all_rtl (get_insns ());

		  seq = gen_sequence ();
		  end_sequence ();
		  p = emit_insn_after (seq, insn);
		  if (NEXT_INSN (insn))
		    compute_insns_for_mem (NEXT_INSN (insn),
					   p ? NEXT_INSN (p) : NULL_RTX,
					   ht);
		}
	      else
		{
		  rtx p = PREV_INSN (insn);

		  start_sequence ();
		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
					   GET_MODE (x), GET_MODE (x),
					   GET_MODE_SIZE (GET_MODE (sub)),
					   GET_MODE_SIZE (GET_MODE (sub)));

		  if (! validate_change (insn, loc, val, 0))
		    {
		      /* Discard the current sequence and put the
			 ADDRESSOF on stack.  */
		      end_sequence ();
		      goto give_up;
		    }

		  seq = gen_sequence ();
		  end_sequence ();
		  emit_insn_before (seq, insn);
		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
					 insn, ht);
		}

	      /* Remember the replacement so that the same one can be done
		 on the REG_NOTES.  */
	      purge_bitfield_addressof_replacements
		= gen_rtx_EXPR_LIST (VOIDmode, x,
				     gen_rtx_EXPR_LIST
				     (VOIDmode, val,
				      purge_bitfield_addressof_replacements));

	      /* We replaced with a reg -- all done.  */
	      return true;
	    }
	}

      else if (validate_change (insn, loc, sub, 0))
	{
	  /* Remember the replacement so that the same one can be done
	     on the REG_NOTES.  */
	  if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
	    {
	      rtx tem;

	      for (tem = purge_addressof_replacements;
		   tem != NULL_RTX;
		   tem = XEXP (XEXP (tem, 1), 1))
		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
		  {
		    XEXP (XEXP (tem, 1), 0) = sub;
		    return true;
		  }
	      purge_addressof_replacements
		= gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
			   gen_rtx_EXPR_LIST (VOIDmode, sub,
					      purge_addressof_replacements));
	      return true;
	    }
	  goto restart;
	}
      /* else give up and put it into the stack */

    give_up:;
    }

  else if (code == ADDRESSOF)
    {
      put_addressof_into_stack (x, ht);
      return true;
    }

  else if (code == SET)
    {
      result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
      result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
      return result;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
    }

  return result;
}
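
/* Illustrative note (not part of the original sources): the bitfield path
   above handles mode punning such as

       int i; ... *(short *) &i ...

   where (mem:HI (addressof:SI (reg:SI N))) cannot simply become
   (reg:SI N); instead an extract_bit_field of the low 16 bits (or a
   store_bit_field on the store side) reproduces what the memory access
   would have done.  */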
/* Return a new hash table entry in HT.  */

static struct hash_entry *
insns_for_mem_newfunc (he, ht, k)
     struct hash_entry *he;
     struct hash_table *ht;
     hash_table_key k ATTRIBUTE_UNUSED;
{
  struct insns_for_mem_entry *ifmhe;
  if (he)
    return he;

  ifmhe = ((struct insns_for_mem_entry *)
	   hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
  ifmhe->insns = NULL_RTX;

  return &ifmhe->he;
}
/* Return a hash value for K, a REG.  */

static unsigned long
insns_for_mem_hash (k)
     hash_table_key k;
{
  /* K is really a RTX.  Just use the address as the hash value.  */
  return (unsigned long) k;
}

/* Return non-zero if K1 and K2 (two REGs) are the same.  */

static boolean
insns_for_mem_comp (k1, k2)
     hash_table_key k1;
     hash_table_key k2;
{
  return k1 == k2;
}
struct insns_for_mem_walk_info {
  /* The hash table that we are using to record which INSNs use which
     REGs.  */
  struct hash_table *ht;

  /* The INSN we are currently processing.  */
  rtx insn;

  /* Zero if we are walking to find ADDRESSOFs, one if we are walking
     to find the insns that use the REGs in the ADDRESSOFs.  */
  int pass;
};

/* Called from compute_insns_for_mem via for_each_rtx.  If R is a REG
   that might be used in an ADDRESSOF expression, record this INSN in
   the hash table given by DATA (which is really a pointer to an
   insns_for_mem_walk_info structure).  */

static int
insns_for_mem_walk (r, data)
     rtx *r;
     void *data;
{
  struct insns_for_mem_walk_info *ifmwi
    = (struct insns_for_mem_walk_info *) data;

  if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
      && GET_CODE (XEXP (*r, 0)) == REG)
    hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
  else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
    {
      /* Look up this REG in the hash table.  */
      struct insns_for_mem_entry *ifme
	= (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
						      *r,
						      /*create=*/0,
						      /*copy=*/0);

      /* If we have not already recorded this INSN, do so now.  Since
	 we process the INSNs in order, we know that if we have
	 recorded it it must be at the front of the list.  */
      if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
	{
	  /* We do the allocation on the same obstack as is used for
	     the hash table since this memory will not be used once
	     the hash table is deallocated.  */
	  push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
	  ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
					   ifme->insns);
	  pop_obstacks ();
	}
    }

  return 0;
}
/* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
   which REGs in HT.  */

static void
compute_insns_for_mem (insns, last_insn, ht)
     rtx insns;
     rtx last_insn;
     struct hash_table *ht;
{
  rtx insn;
  struct insns_for_mem_walk_info ifmwi;
  ifmwi.ht = ht;

  for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
    for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  ifmwi.insn = insn;
	  for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
	}
}
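
/* Illustrative note (not part of the original sources): pass 0 seeds the
   table with every REG appearing inside an (addressof (reg)); pass 1 then
   records, for each such REG, an EXPR_LIST of the insns that mention it.
   The later fixup pass consults that list so a REG-to-MEM conversion need
   only revisit the insns that can actually be affected.  */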
/* Helper function for purge_addressof called through for_each_rtx.
   Returns true iff the rtl is an ADDRESSOF.  */

static int
is_addressof (rtl, data)
     rtx *rtl;
     void *data ATTRIBUTE_UNUSED;
{
  return GET_CODE (*rtl) == ADDRESSOF;
}
/* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
   (MEM (ADDRESSOF)) patterns, and force any needed registers into the
   stack.  */

void
purge_addressof (insns)
     rtx insns;
{
  rtx insn;
  struct hash_table ht;

  /* When we actually purge ADDRESSOFs, we turn REGs into MEMs.  That
     requires a fixup pass over the instruction stream to correct
     INSNs that depended on the REG being a REG, and not a MEM.  But,
     these fixup passes are slow.  Furthermore, most MEMs are not
     mentioned in very many instructions.  So, we speed up the process
     by pre-calculating which REGs occur in which INSNs; that allows
     us to perform the fixup passes much more quickly.  */
  hash_table_init (&ht,
		   insns_for_mem_newfunc,
		   insns_for_mem_hash,
		   insns_for_mem_comp);
  compute_insns_for_mem (insns, NULL_RTX, &ht);

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	if (! purge_addressof_1 (&PATTERN (insn), insn,
				 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
	  /* If we could not replace the ADDRESSOFs in the insn,
	     something is wrong.  */
	  abort ();

	if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
	  {
	    /* If we could not replace the ADDRESSOFs in the insn's notes,
	       we can just remove the offending notes instead.  */
	    rtx note;

	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	      {
		/* If we find a REG_RETVAL note then the insn is a libcall.
		   Such insns must have REG_EQUAL notes as well, in order
		   for later passes of the compiler to work.  So it is not
		   safe to delete the notes here, and instead we abort.  */
		if (REG_NOTE_KIND (note) == REG_RETVAL)
		  abort ();
		if (for_each_rtx (&note, is_addressof, NULL))
		  remove_note (insn, note);
	      }
	  }
      }

  /* Clean up.  */
  hash_table_free (&ht);
  purge_bitfield_addressof_replacements = 0;
  purge_addressof_replacements = 0;
}
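
/* Illustrative note (not part of the original sources): purge_addressof
   runs once per function, after RTL generation, so no ADDRESSOF may
   survive it; the abort above enforces that for insn patterns, while
   notes are merely dropped when they cannot be fixed.  */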
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (fndecl, insns)
     tree fndecl;
     rtx insns;
{
  rtx insn;
  int i;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET;

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
      }

  /* Instantiate the stack slots for the parm registers, for later use in
     addressof elimination.  */
  for (i = 0; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
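
/* Illustrative note (not part of the original sources): with, say,
   STARTING_FRAME_OFFSET == -16, an access such as

       (mem:SI (plus (reg virtual-stack-vars) (const_int 4)))

   becomes (mem:SI (plus (reg fp) (const_int -12))) here, since
   var_offset folds the virtual register's displacement into the
   constant term.  */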
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (DECL_SAVED_INSNS (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
	 larger than the declared type size.  We must use the larger of
	 the two sizes.  */
      size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (let, valid_only)
     tree let;
     int valid_only;
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
		      valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is non-zero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (x, size, valid_only)
     rtx x;
     int size;
     int valid_only;
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || GET_CODE (x) != MEM)
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
      || (GET_CODE (addr) == REG
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only)
    {
      /* Now verify that the resulting address is valid for every integer or
	 floating-point mode up to and including SIZE bytes long.  We do this
	 since the object might be accessed in any mode and frame addresses
	 are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
	   mode = GET_MODE_WIDER_MODE (mode))
	if (! memory_address_p (mode, addr))
	  return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
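
/* Illustrative note (not part of the original sources): the mode walk
   above matters because a DImode slot may later be accessed piecewise in
   SImode; an address valid for one mode but not the other on some
   targets must not be installed when VALID_ONLY is set.  */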
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if replacement
   is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */

static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;
  HOST_WIDE_INT offset = 0;
  rtx temp;
  rtx seq;
  int i, j;
  const char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;
      else if (SET_DEST (x) == virtual_cfa_rtx)
	new = arg_pointer_rtx, offset = - cfa_offset;

      if (new)
	{
	  rtx src = SET_SRC (x);

	  instantiate_virtual_regs_1 (&src, NULL_RTX, 0);

	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
	    abort ();

	  start_sequence ();
	  if (GET_CODE (src) != REG)
	    temp = force_operand (src, NULL_RTX);
	  else
	    temp = src;
	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  SET_DEST (x) = new;

	  if (! validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    abort ();

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old, new_offset;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else if (inner == virtual_cfa_rtx)
		new = arg_pointer_rtx, offset = cfa_offset;
	      else
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
	    }

	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else if (XEXP (x, 0) == virtual_cfa_rtx)
	    new = arg_pointer_rtx, offset = cfa_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  new_offset = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with just
	     the register.  */
	  if (new_offset == const0_rtx
	      && validate_change (object, loc, new, 0))
	    return 1;

	  /* Next try to replace the register and new offset.
	     There are two changes to validate here and we can't assume that
	     in the case of old offset equals new just changing the register
	     will yield a valid insn.  In the interests of a little efficiency,
	     however, we only call validate change once (we don't queue up the
	     changes and then call apply_change_group).  */

	  old = XEXP (x, 0);
	  if (offset == 0
	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
	      : (XEXP (x, 0) = new,
		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
	    {
	      if (! extra_insns)
		{
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      XEXP (x, 0) = new;
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new_offset), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  XEXP (x, 0) = old;
		  new = gen_rtx_PLUS (Pmode, new, new_offset);

		  start_sequence ();
		  temp = force_operand (new, NULL_RTX);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of decls.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 proceed normally.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  || temp == hard_frame_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || XEXP (temp, 0) == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed an object, we can
	     only modify X without copying it if X will have a valid
	     address.

	     ??? Also note that this can still lose if OBJECT is an insn that
	     has less restrictions on an address than some other insn.
	     In that case, we will modify the shared address.  This case
	     doesn't seem very likely, though.  One case where this could
	     happen is in the case of a USE or CLOBBER reference, but we
	     take care of that below.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
					  object ? object : x, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}

      /* Fall through to generic unary operation case.  */
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case USE:
    case CLOBBER:
      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
	 go ahead and make the invalid one, but do it to a copy.  For a REG,
	 just make the recursive call, since there's no chance of a problem.  */

      if ((GET_CODE (XEXP (x, 0)) == MEM
	   && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
					  0))
	  || (GET_CODE (XEXP (x, 0)) == REG
	      && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
	return 1;

      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;
      else if (x == virtual_cfa_rtx)
	new = arg_pointer_rtx, offset = cfa_offset;

      if (new)
	{
	  temp = plus_constant (new, offset);
	  if (!validate_change (object, loc, temp, 0))
	    {
	      if (! extra_insns)
		return 0;

	      start_sequence ();
	      temp = force_operand (temp, NULL_RTX);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insns_before (seq, object);
	      if (! validate_change (object, loc, temp, 0)
		  && ! validate_replace_rtx (x, temp, object))
		abort ();
	    }
	}

      return 1;

    case ADDRESSOF:
      if (GET_CODE (XEXP (x, 0)) == REG)
	return 1;

      else if (GET_CODE (XEXP (x, 0)) == MEM)
	{
	  /* If we have a (addressof (mem ..)), do any instantiation inside
	     since we know we'll be making the inside valid when we finally
	     remove the ADDRESSOF.  */
	  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
	  return 1;
	}
      break;

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					  extra_insns))
	  return 0;

  return 1;
}
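
/* Illustrative note (not part of the original sources): the cheap cases
   above avoid creating new pseudos.  E.g. (plus (reg virtual-incoming-args)
   (const_int 8)) with FIRST_PARM_OFFSET == 16 first tries the in-place
   rewrite (plus (reg ap) (const_int 24)); only if the insn rejects that
   form is the sum computed into a fresh register before the insn.  */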
/* Optimization: assuming this function does not receive nonlocal gotos,
   delete the handlers for such, as well as the insns to establish
   and disestablish them.  */

static void
delete_handlers ()
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Delete the handler by turning off the flag that would
	 prevent jump_optimize from deleting it.
	 Also permit deletion of the nonlocal labels themselves
	 if nothing local refers to them.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  tree t, last_t;

	  LABEL_PRESERVE_P (insn) = 0;

	  /* Remove it from the nonlocal_label list, to avoid confusing
	     flow.  */
	  for (t = nonlocal_labels, last_t = 0; t;
	       last_t = t, t = TREE_CHAIN (t))
	    if (DECL_RTL (TREE_VALUE (t)) == insn)
	      break;
	  if (t)
	    {
	      if (! last_t)
		nonlocal_labels = TREE_CHAIN (nonlocal_labels);
	      else
		TREE_CHAIN (last_t) = TREE_CHAIN (t);
	    }
	}
      if (GET_CODE (insn) == INSN)
	{
	  int can_delete = 0;
	  rtx t;
	  for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
	    if (reg_mentioned_p (t, PATTERN (insn)))
	      {
		can_delete = 1;
		break;
	      }
	  if (can_delete
	      || (nonlocal_goto_stack_level != 0
		  && reg_mentioned_p (nonlocal_goto_stack_level,
				      PATTERN (insn))))
	    delete_insn (insn);
	}
    }
}
/* Output a USE for any register use in RTL.
   This is used with -noreg to mark the extent of lifespan
   of any registers used in a user-visible variable's DECL_RTL.  */

void
use_variable (rtl)
     rtx rtl;
{
  if (GET_CODE (rtl) == REG)
    /* This is a register variable.  */
    emit_insn (gen_rtx_USE (VOIDmode, rtl));
  else if (GET_CODE (rtl) == MEM
	   && GET_CODE (XEXP (rtl, 0)) == REG
	   && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
	       || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
	   && XEXP (rtl, 0) != current_function_internal_arg_pointer)
    /* This is a variable-sized structure.  */
    emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
}

/* Like use_variable except that it outputs the USEs after INSN
   instead of at the end of the insn-chain.  */

void
use_variable_after (rtl, insn)
     rtx rtl, insn;
{
  if (GET_CODE (rtl) == REG)
    /* This is a register variable.  */
    emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
  else if (GET_CODE (rtl) == MEM
	   && GET_CODE (XEXP (rtl, 0)) == REG
	   && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
	       || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
	   && XEXP (rtl, 0) != current_function_internal_arg_pointer)
    /* This is a variable-sized structure.  */
    emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
}
int
max_parm_reg_num ()
{
  return max_parm_reg;
}

/* Return the first insn following those generated by `assign_parms'.  */

rtx
get_first_nonparm_insn ()
{
  if (last_parm_insn)
    return NEXT_INSN (last_parm_insn);
  return get_insns ();
}

/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
   Crash if there is none.  */

rtx
get_first_block_beg ()
{
  register rtx searcher;
  register rtx insn = get_first_nonparm_insn ();

  for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
    if (GET_CODE (searcher) == NOTE
	&& NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
      return searcher;

  abort ();	/* Invalid call to this function.  (See comments above.)  */
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (exp)
     tree exp;
{
  int i, regno, nregs;
  rtx reg;
  tree type;

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
    type = exp;
  else
    type = TREE_TYPE (exp);

  if (RETURN_IN_MEMORY (type))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (GET_CODE (reg) != REG)
    return 0;

  regno = REGNO (reg);
  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
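
/* Illustrative note (not part of the original sources): on a target where
   hard_function_value picks a call-saved register pair for a 16-byte
   struct, the loop above reports the value as an aggregate, forcing the
   caller to pass a return-slot address instead.  */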
/* Assign RTL expressions to the function's parameters.
   This may involve copying them into registers and using
   those registers as the RTL for them.  */

void
assign_parms (fndecl)
     tree fndecl;
{
  register tree parm;
  register rtx entry_parm = 0;
  register rtx stack_parm = 0;
  CUMULATIVE_ARGS args_so_far;
  enum machine_mode promoted_mode, passed_mode;
  enum machine_mode nominal_mode, promoted_nominal_mode;
  int unsignedp;
  /* Total space needed so far for args on the stack,
     given as a constant and a tree-expression.  */
  struct args_size stack_args_size;
  tree fntype = TREE_TYPE (fndecl);
  tree fnargs = DECL_ARGUMENTS (fndecl);
  /* This is used for the arg pointer when referring to stack args.  */
  rtx internal_arg_pointer;
  /* This is a dummy PARM_DECL that we used for the function result if
     the function returns a structure.  */
  tree function_result_decl = 0;
#ifdef SETUP_INCOMING_VARARGS
  int varargs_setup = 0;
#endif
  rtx conversion_insns = 0;
  struct args_size alignment_pad;

  /* Nonzero if the last arg is named `__builtin_va_alist',
     which is used on some machines for old-fashioned non-ANSI varargs.h;
     this should be stuck onto the stack as if it had arrived there.  */
  int hide_last_arg
    = (current_function_varargs
       && (parm = tree_last (fnargs)) != 0
       && DECL_NAME (parm)
       && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
		     "__builtin_va_alist")));

  /* Nonzero if function takes extra anonymous args.
     This means the last named arg must be on the stack
     right before the anonymous ones.  */
  int stdarg
    = (TYPE_ARG_TYPES (fntype) != 0
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   != void_type_node));

  current_function_stdarg = stdarg;

  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.

     The second time through, simply use ap to avoid generating rtx.  */

  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
  else
    internal_arg_pointer = virtual_incoming_args_rtx;
  current_function_internal_arg_pointer = internal_arg_pointer;

  stack_args_size.constant = 0;
  stack_args_size.var = 0;

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl))
      && ! current_function_returns_pcc_struct
      && struct_value_incoming_rtx == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));

      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);

      DECL_ARG_TYPE (function_result_decl) = type;
      TREE_CHAIN (function_result_decl) = fnargs;
      fnargs = function_result_decl;
    }

  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
  parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
#endif

  /* We haven't yet found an argument that we must push and pretend the
     caller did.  */
  current_function_pretend_args_size = 0;

  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
    {
      int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
      struct args_size stack_offset;
      struct args_size arg_size;
      int passed_pointer = 0;
      int did_conversion = 0;
      tree passed_type = DECL_ARG_TYPE (parm);
      tree nominal_type = TREE_TYPE (parm);

      /* Set LAST_NAMED if this is last named arg before some
	 anonymous args.  */
      int last_named = ((TREE_CHAIN (parm) == 0
			 || DECL_NAME (TREE_CHAIN (parm)) == 0)
			&& (stdarg || current_function_varargs));
      /* Set NAMED_ARG if this arg should be treated as a named arg.  For
	 most machines, if this is a varargs/stdarg function, then we treat
	 the last named arg as if it were anonymous too.  */
      int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;

      if (TREE_TYPE (parm) == error_mark_node
	  /* This can happen after weird syntax errors
	     or if an enum type is defined among the parms.  */
	  || TREE_CODE (parm) != PARM_DECL
	  || passed_type == NULL)
	{
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
	    = gen_rtx_MEM (BLKmode, const0_rtx);
	  TREE_USED (parm) = 1;
	  continue;
	}

      /* For varargs.h function, save info about regs and stack space
	 used by the individual args, not including the va_alist arg.  */
      if (hide_last_arg && last_named)
	current_function_args_info = args_so_far;

      /* Find mode of arg as it is passed, and mode of arg
	 as it should be during execution of this function.  */
      passed_mode = TYPE_MODE (passed_type);
      nominal_mode = TYPE_MODE (nominal_type);

      /* If the parm's mode is VOID, its value doesn't matter,
	 and avoid the usual things like emit_move_insn that could crash.  */
      if (nominal_mode == VOIDmode)
	{
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
	  continue;
	}

      /* If the parm is to be passed as a transparent union, use the
	 type of the first field for the tests below.  We have already
	 verified that the modes are the same.  */
      if (DECL_TRANSPARENT_UNION (parm)
	  || TYPE_TRANSPARENT_UNION (passed_type))
	passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));

      /* See if this arg was passed by invisible reference.  It is if
	 it is an object whose size depends on the contents of the
4166 it is an object whose size depends on the contents of the
4167 object itself or if the machine requires these objects be passed
4170 if ((TREE_CODE (TYPE_SIZE (passed_type
)) != INTEGER_CST
4171 && contains_placeholder_p (TYPE_SIZE (passed_type
)))
4172 || TREE_ADDRESSABLE (passed_type
)
4173 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4174 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, passed_mode
,
4175 passed_type
, named_arg
)
4179 passed_type
= nominal_type
= build_pointer_type (passed_type
);
4181 passed_mode
= nominal_mode
= Pmode
;
4184 promoted_mode
= passed_mode
;
4186 #ifdef PROMOTE_FUNCTION_ARGS
4187 /* Compute the mode in which the arg is actually extended to. */
4188 unsignedp
= TREE_UNSIGNED (passed_type
);
4189 promoted_mode
= promote_mode (passed_type
, promoted_mode
, &unsignedp
, 1);
4192 /* Let machine desc say which reg (if any) the parm arrives in.
4193 0 means it arrives on the stack. */
4194 #ifdef FUNCTION_INCOMING_ARG
4195 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
4196 passed_type
, named_arg
);
4198 entry_parm
= FUNCTION_ARG (args_so_far
, promoted_mode
,
4199 passed_type
, named_arg
);
4202 if (entry_parm
== 0)
4203 promoted_mode
= passed_mode
;
4205 #ifdef SETUP_INCOMING_VARARGS
4206 /* If this is the last named parameter, do any required setup for
4207 varargs or stdargs. We need to know about the case of this being an
4208 addressable type, in which case we skip the registers it
4209 would have arrived in.
4211 For stdargs, LAST_NAMED will be set for two parameters, the one that
4212 is actually the last named, and the dummy parameter. We only
4213 want to do this action once.
4215 Also, indicate when RTL generation is to be suppressed. */
4216 if (last_named
&& !varargs_setup
)
4218 SETUP_INCOMING_VARARGS (args_so_far
, promoted_mode
, passed_type
,
4219 current_function_pretend_args_size
, 0);
4224 /* Determine parm's home in the stack,
4225 in case it arrives in the stack or we should pretend it did.
4227 Compute the stack position and rtx where the argument arrives
4230 There is one complexity here: If this was a parameter that would
4231 have been passed in registers, but wasn't only because it is
4232 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4233 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4234 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4235 0 as it was the previous time. */
4237 pretend_named
= named_arg
|| PRETEND_OUTGOING_VARARGS_NAMED
;
4238 locate_and_pad_parm (promoted_mode
, passed_type
,
4239 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4242 #ifdef FUNCTION_INCOMING_ARG
4243 FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
4245 pretend_named
) != 0,
4247 FUNCTION_ARG (args_so_far
, promoted_mode
,
4249 pretend_named
) != 0,
4252 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
,
4256 rtx offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
4258 if (offset_rtx
== const0_rtx
)
4259 stack_parm
= gen_rtx_MEM (promoted_mode
, internal_arg_pointer
);
4261 stack_parm
= gen_rtx_MEM (promoted_mode
,
4262 gen_rtx_PLUS (Pmode
,
4263 internal_arg_pointer
,
4266 /* If this is a memory ref that contains aggregate components,
4267 mark it as such for cse and loop optimize. Likewise if it
4269 MEM_SET_IN_STRUCT_P (stack_parm
, aggregate
);
4270 RTX_UNCHANGING_P (stack_parm
) = TREE_READONLY (parm
);
4271 MEM_ALIAS_SET (stack_parm
) = get_alias_set (parm
);
4274 /* If this parameter was passed both in registers and in the stack,
4275 use the copy on the stack. */
4276 if (MUST_PASS_IN_STACK (promoted_mode
, passed_type
))
4279 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4280 /* If this parm was passed part in regs and part in memory,
4281 pretend it arrived entirely in memory
4282 by pushing the register-part onto the stack.
4284 In the special case of a DImode or DFmode that is split,
4285 we could put it together in a pseudoreg directly,
4286 but for now that's not worth bothering with. */
4290 int nregs
= FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, promoted_mode
,
4291 passed_type
, named_arg
);
4295 current_function_pretend_args_size
4296 = (((nregs
* UNITS_PER_WORD
) + (PARM_BOUNDARY
/ BITS_PER_UNIT
) - 1)
4297 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
4298 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
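
              /* Worked example of the rounding above (illustrative values
                 only): with UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64,
                 one register's worth of pretend args is 4 bytes, and
                 ((4 + 8 - 1) / 8) * 8 == 8, so the pretend-args area is
                 padded out to a full 8-byte parameter boundary.  */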
              /* Handle calls that pass values in multiple non-contiguous
                 locations.  The Irix 6 ABI has examples of this.  */
              if (GET_CODE (entry_parm) == PARALLEL)
                emit_group_store (validize_mem (stack_parm), entry_parm,
                                  int_size_in_bytes (TREE_TYPE (parm)),
                                  (TYPE_ALIGN (TREE_TYPE (parm))
                                   / BITS_PER_UNIT));
              else
                move_block_from_reg (REGNO (entry_parm),
                                     validize_mem (stack_parm), nregs,
                                     int_size_in_bytes (TREE_TYPE (parm)));

              entry_parm = stack_parm;
            }
        }
#endif

      /* If we didn't decide this parm came in a register,
         by default it came on the stack.  */
      if (entry_parm == 0)
        entry_parm = stack_parm;

      /* Record permanently how this parm was passed.  */
      DECL_INCOMING_RTL (parm) = entry_parm;

      /* If there is actually space on the stack for this parm,
         count it in stack_args_size; otherwise set stack_parm to 0
         to indicate there is no preallocated stack slot for the parm.  */

      if (entry_parm == stack_parm
          || (GET_CODE (entry_parm) == PARALLEL
              && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
          /* On some machines, even if a parm value arrives in a register
             there is still an (uninitialized) stack slot allocated for it.

             ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
             whether this parameter already has a stack slot allocated,
             because an arg block exists only if current_function_args_size
             is larger than some threshold, and we haven't calculated that
             yet.  So, for now, we just assume that stack slots never exist
             in this case.  */
          || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
          )
        {
          stack_args_size.constant += arg_size.constant;
          if (arg_size.var)
            ADD_PARM_SIZE (stack_args_size, arg_size.var);
        }
      else
        /* No stack slot was pushed for this parm.  */
        stack_parm = 0;

      /* Update info on where next arg arrives in registers.  */

      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
                            passed_type, named_arg);

      /* If we can't trust the parm stack slot to be aligned enough
         for its ultimate type, don't use that slot after entry.
         We'll make another stack slot, if we need one.  */
      {
        int thisparm_boundary
          = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);

        if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
          stack_parm = 0;
      }

      /* If parm was passed in memory, and we need to convert it on entry,
         don't store it back in that same slot.  */
      if (entry_parm != 0
          && nominal_mode != BLKmode && nominal_mode != passed_mode)
        stack_parm = 0;

      /* Now adjust STACK_PARM to the mode and precise location
         where this parameter should live during execution,
         if we discover that it must live in the stack during execution.
         To make debuggers happier on big-endian machines, we store
         the value in the last bytes of the space available.  */

      if (nominal_mode != BLKmode && nominal_mode != passed_mode
          && stack_parm != 0)
        {
          rtx offset_rtx;

          if (BYTES_BIG_ENDIAN
              && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
            stack_offset.constant += (GET_MODE_SIZE (passed_mode)
                                      - GET_MODE_SIZE (nominal_mode));

          offset_rtx = ARGS_SIZE_RTX (stack_offset);
          if (offset_rtx == const0_rtx)
            stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
          else
            stack_parm = gen_rtx_MEM (nominal_mode,
                                      gen_rtx_PLUS (Pmode,
                                                    internal_arg_pointer,
                                                    offset_rtx));

          /* If this is a memory ref that contains aggregate components,
             mark it as such for cse and loop optimize.  */
          MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
        }

      /* ENTRY_PARM is an RTX for the parameter as it arrives,
         in the mode in which it arrives.
         STACK_PARM is an RTX for a stack slot where the parameter can live
         during the function (in case we want to put it there).
         STACK_PARM is 0 if no stack slot was pushed for it.

         Now output code if necessary to convert ENTRY_PARM to
         the type in which this function declares it,
         and store that result in an appropriate place,
         which may be a pseudo reg, may be STACK_PARM,
         or may be a local stack slot if STACK_PARM is 0.

         Set DECL_RTL to that place.  */

      if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
        {
          /* If a BLKmode arrives in registers, copy it to a stack slot.
             Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (entry_parm) == REG
              || GET_CODE (entry_parm) == PARALLEL)
            {
              int size_stored
                = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
                              UNITS_PER_WORD);

              /* Note that we will be storing an integral number of words.
                 So we have to be careful to ensure that we allocate an
                 integral number of words.  We do this below in the
                 assign_stack_local if space was not allocated in the argument
                 list.  If it was, this will not work if PARM_BOUNDARY is not
                 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
                 if it becomes a problem.  */
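
              /* Illustration (added; not part of the original comment): a
                 6-byte BLKmode struct with UNITS_PER_WORD == 4 gives
                 SIZE_STORED == CEIL_ROUND (6, 4) == 8, i.e. two whole
                 words, so the block move below never writes a partial
                 word.  */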
              if (stack_parm == 0)
                {
                  stack_parm
                    = assign_stack_local (GET_MODE (entry_parm),
                                          size_stored, 0);

                  /* If this is a memory ref that contains aggregate
                     components, mark it as such for cse and loop optimize.  */
                  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
                }
              else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
                abort ();

              if (TREE_READONLY (parm))
                RTX_UNCHANGING_P (stack_parm) = 1;

              /* Handle calls that pass values in multiple non-contiguous
                 locations.  The Irix 6 ABI has examples of this.  */
              if (GET_CODE (entry_parm) == PARALLEL)
                emit_group_store (validize_mem (stack_parm), entry_parm,
                                  int_size_in_bytes (TREE_TYPE (parm)),
                                  (TYPE_ALIGN (TREE_TYPE (parm))
                                   / BITS_PER_UNIT));
              else
                move_block_from_reg (REGNO (entry_parm),
                                     validize_mem (stack_parm),
                                     size_stored / UNITS_PER_WORD,
                                     int_size_in_bytes (TREE_TYPE (parm)));
            }
          DECL_RTL (parm) = stack_parm;
        }
      else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
                   && ! DECL_INLINE (fndecl))
                  /* layout_decl may set this.  */
                  || TREE_ADDRESSABLE (parm)
                  || TREE_SIDE_EFFECTS (parm)
                  /* If -ffloat-store specified, don't put explicit
                     float variables into registers.  */
                  || (flag_float_store
                      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
               /* Always assign pseudo to structure return or item passed
                  by invisible reference.  */
               || passed_pointer || parm == function_result_decl)
        {
          /* Store the parm in a pseudoregister during the function, but we
             may need to do it in a wider mode.  */

          register rtx parmreg;
          int regno, regnoi = 0, regnor = 0;

          unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));

          promoted_nominal_mode
            = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);

          parmreg = gen_reg_rtx (promoted_nominal_mode);
          mark_user_reg (parmreg);

          /* If this was an item that we received a pointer to, set DECL_RTL
             appropriately.  */
          if (passed_pointer)
            {
              DECL_RTL (parm)
                = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
              MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
            }
          else
            DECL_RTL (parm) = parmreg;

          /* Copy the value into the register.  */
          if (nominal_mode != passed_mode
              || promoted_nominal_mode != promoted_mode)
            {
              int save_tree_used;
              /* ENTRY_PARM has been converted to PROMOTED_MODE, its
                 mode, by the caller.  We now have to convert it to
                 NOMINAL_MODE, if different.  However, PARMREG may be in
                 a different mode than NOMINAL_MODE if it is being stored
                 promoted.

                 If ENTRY_PARM is a hard register, it might be in a register
                 not valid for operating in its mode (e.g., an odd-numbered
                 register for a DFmode).  In that case, moves are the only
                 thing valid, so we can't do a convert from there.  This
                 occurs when the calling sequence allow such misaligned
                 usages.

                 In addition, the conversion may involve a call, which could
                 clobber parameters which haven't been copied to pseudo
                 registers yet.  Therefore, we must first copy the parm to
                 a pseudo reg here, and save the conversion until after all
                 parameters have been moved.  */

              rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

              emit_move_insn (tempreg, validize_mem (entry_parm));

              push_to_sequence (conversion_insns);
              tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);

              /* TREE_USED gets set erroneously during expand_assignment.  */
              save_tree_used = TREE_USED (parm);
              expand_assignment (parm,
                                 make_tree (nominal_type, tempreg), 0, 0);
              TREE_USED (parm) = save_tree_used;
              conversion_insns = get_insns ();
              did_conversion = 1;
              end_sequence ();
            }
          else
            emit_move_insn (parmreg, validize_mem (entry_parm));

          /* If we were passed a pointer but the actual value
             can safely live in a register, put it in one.  */
          if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
              && ! ((obey_regdecls && ! DECL_REGISTER (parm)
                     && ! DECL_INLINE (fndecl))
                    /* layout_decl may set this.  */
                    || TREE_ADDRESSABLE (parm)
                    || TREE_SIDE_EFFECTS (parm)
                    /* If -ffloat-store specified, don't put explicit
                       float variables into registers.  */
                    || (flag_float_store
                        && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
            {
              /* We can't use nominal_mode, because it will have been set to
                 Pmode above.  We must use the actual mode of the parm.  */
              parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
              mark_user_reg (parmreg);
              emit_move_insn (parmreg, DECL_RTL (parm));
              DECL_RTL (parm) = parmreg;
              /* STACK_PARM is the pointer, not the parm, and PARMREG is
                 now the parm.  */
              stack_parm = 0;
            }
#ifdef FUNCTION_ARG_CALLEE_COPIES
          /* If we are passed an arg by reference and it is our responsibility
             to make a copy, do it now.
             PASSED_TYPE and PASSED mode now refer to the pointer, not the
             original argument, so we must recreate them in the call to
             FUNCTION_ARG_CALLEE_COPIES.  */
          /* ??? Later add code to handle the case that if the argument isn't
             modified, don't do the copy.  */

          else if (passed_pointer
                   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
                                                  TYPE_MODE (DECL_ARG_TYPE (parm)),
                                                  DECL_ARG_TYPE (parm),
                                                  named_arg)
                   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
            {
              rtx copy;
              tree type = DECL_ARG_TYPE (parm);

              /* This sequence may involve a library call perhaps clobbering
                 registers that haven't been copied to pseudos yet.  */

              push_to_sequence (conversion_insns);

              if (TYPE_SIZE (type) == 0
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                /* This is a variable sized object.  */
                copy = gen_rtx_MEM (BLKmode,
                                    allocate_dynamic_stack_space
                                    (expr_size (parm), NULL_RTX,
                                     TYPE_ALIGN (type)));
              else
                copy = assign_stack_temp (TYPE_MODE (type),
                                          int_size_in_bytes (type), 1);
              MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
              RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);

              store_expr (parm, copy, 0);
              emit_move_insn (parmreg, XEXP (copy, 0));
              if (current_function_check_memory_usage)
                emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                   XEXP (copy, 0), Pmode,
                                   GEN_INT (int_size_in_bytes (type)),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              conversion_insns = get_insns ();
              did_conversion = 1;
              end_sequence ();
            }
#endif /* FUNCTION_ARG_CALLEE_COPIES */

          /* In any case, record the parm's desired stack location
             in case we later discover it must live in the stack.

             If it is a COMPLEX value, store the stack location for both
             halves.  */

          if (GET_CODE (parmreg) == CONCAT)
            regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
          else
            regno = REGNO (parmreg);

          if (regno >= max_parm_reg)
            {
              rtx *new;
              int old_max_parm_reg = max_parm_reg;

              /* It's slow to expand this one register at a time,
                 but it's also rare and we need max_parm_reg to be
                 precisely correct.  */
              max_parm_reg = regno + 1;
              new = (rtx *) xrealloc (parm_reg_stack_loc,
                                      max_parm_reg * sizeof (rtx));
              bzero ((char *) (new + old_max_parm_reg),
                     (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
              parm_reg_stack_loc = new;
            }

          if (GET_CODE (parmreg) == CONCAT)
            {
              enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));

              regnor = REGNO (gen_realpart (submode, parmreg));
              regnoi = REGNO (gen_imagpart (submode, parmreg));

              if (stack_parm != 0)
                {
                  parm_reg_stack_loc[regnor]
                    = gen_realpart (submode, stack_parm);
                  parm_reg_stack_loc[regnoi]
                    = gen_imagpart (submode, stack_parm);
                }
              else
                {
                  parm_reg_stack_loc[regnor] = 0;
                  parm_reg_stack_loc[regnoi] = 0;
                }
            }
          else
            parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;

          /* Mark the register as eliminable if we did no conversion
             and it was copied from memory at a fixed offset,
             and the arg pointer was not copied to a pseudo-reg.
             If the arg pointer is a pseudo reg or the offset formed
             an invalid address, such memory-equivalences
             as we make here would screw up life analysis for it.  */
          if (nominal_mode == passed_mode
              && ! did_conversion
              && stack_parm != 0
              && GET_CODE (stack_parm) == MEM
              && stack_offset.var == 0
              && reg_mentioned_p (virtual_incoming_args_rtx,
                                  XEXP (stack_parm, 0)))
            {
              rtx linsn = get_last_insn ();
              rtx sinsn, set;

              /* Mark complex types separately.  */
              if (GET_CODE (parmreg) == CONCAT)
                /* Scan backwards for the set of the real and
                   imaginary parts.  */
                for (sinsn = linsn; sinsn != 0;
                     sinsn = prev_nonnote_insn (sinsn))
                  {
                    set = single_set (sinsn);
                    if (set != 0
                        && SET_DEST (set) == regno_reg_rtx[regnoi])
                      REG_NOTES (sinsn)
                        = gen_rtx_EXPR_LIST (REG_EQUIV,
                                             parm_reg_stack_loc[regnoi],
                                             REG_NOTES (sinsn));
                    else if (set != 0
                             && SET_DEST (set) == regno_reg_rtx[regnor])
                      REG_NOTES (sinsn)
                        = gen_rtx_EXPR_LIST (REG_EQUIV,
                                             parm_reg_stack_loc[regnor],
                                             REG_NOTES (sinsn));
                  }
              else if ((set = single_set (linsn)) != 0
                       && SET_DEST (set) == parmreg)
                REG_NOTES (linsn)
                  = gen_rtx_EXPR_LIST (REG_EQUIV,
                                       stack_parm, REG_NOTES (linsn));
            }

          /* For pointer data type, suggest pointer register.  */
          if (POINTER_TYPE_P (TREE_TYPE (parm)))
            mark_reg_pointer (parmreg,
                              (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
                               / BITS_PER_UNIT));
        }
      else
        {
          /* Value must be stored in the stack slot STACK_PARM
             during function execution.  */

          if (promoted_mode != nominal_mode)
            {
              /* Conversion is required.  */
              rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));

              emit_move_insn (tempreg, validize_mem (entry_parm));

              push_to_sequence (conversion_insns);
              entry_parm = convert_to_mode (nominal_mode, tempreg,
                                            TREE_UNSIGNED (TREE_TYPE (parm)));
              if (stack_parm)
                /* ??? This may need a big-endian conversion on sparc64.  */
                stack_parm = change_address (stack_parm, nominal_mode,
                                             NULL_RTX);
              conversion_insns = get_insns ();
              did_conversion = 1;
              end_sequence ();
            }

          if (entry_parm != stack_parm)
            {
              if (stack_parm == 0)
                {
                  stack_parm
                    = assign_stack_local (GET_MODE (entry_parm),
                                          GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
                  /* If this is a memory ref that contains aggregate components,
                     mark it as such for cse and loop optimize.  */
                  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
                }

              if (promoted_mode != nominal_mode)
                {
                  push_to_sequence (conversion_insns);
                  emit_move_insn (validize_mem (stack_parm),
                                  validize_mem (entry_parm));
                  conversion_insns = get_insns ();
                  end_sequence ();
                }
              else
                emit_move_insn (validize_mem (stack_parm),
                                validize_mem (entry_parm));
            }
          if (current_function_check_memory_usage)
            {
              push_to_sequence (conversion_insns);
              emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                 XEXP (stack_parm, 0), Pmode,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE
                                                         (entry_parm))),
                                 TYPE_MODE (sizetype),
                                 GEN_INT (MEMORY_USE_RW),
                                 TYPE_MODE (integer_type_node));

              conversion_insns = get_insns ();
              end_sequence ();
            }
          DECL_RTL (parm) = stack_parm;
        }

      /* If this "parameter" was the place where we are receiving the
         function's incoming structure pointer, set up the result.  */
      if (parm == function_result_decl)
        {
          tree result = DECL_RESULT (fndecl);
          tree restype = TREE_TYPE (result);

          DECL_RTL (result)
            = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));

          MEM_SET_IN_STRUCT_P (DECL_RTL (result),
                               AGGREGATE_TYPE_P (restype));
        }

      if (TREE_THIS_VOLATILE (parm))
        MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
      if (TREE_READONLY (parm))
        RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
    }

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insns (conversion_insns);

  last_parm_insn = get_last_insn ();

  current_function_args_size = stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
#ifndef MAYBE_REG_PARM_STACK_SPACE
  current_function_args_size = MAX (current_function_args_size,
                                    REG_PARM_STACK_SPACE (fndecl));
#endif
#endif

#ifdef STACK_BOUNDARY
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

  current_function_args_size
    = ((current_function_args_size + STACK_BYTES - 1)
       / STACK_BYTES) * STACK_BYTES;
#endif

#ifdef ARGS_GROW_DOWNWARD
  current_function_arg_offset_rtx
    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
       : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
                                  size_int (-stack_args_size.constant)),
                      NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
#else
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
                                                 current_function_args_size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  if (!hide_last_arg)
    current_function_args_info = args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
}
/* Indicate whether REGNO is an incoming argument to the current function
   that was promoted to a wider mode.  If so, return the RTX for the
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
   that REGNO is promoted from and whether the promotion was signed or
   unsigned.  */

#ifdef PROMOTE_FUNCTION_ARGS

rtx
promoted_input_arg (regno, pmode, punsignedp)
     int regno;
     enum machine_mode *pmode;
     int *punsignedp;
{
  tree arg;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
        && REGNO (DECL_INCOMING_RTL (arg)) == regno
        && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
        int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));

        mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
        if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
            && mode != DECL_MODE (arg))
          {
            *pmode = DECL_MODE (arg);
            *punsignedp = unsignedp;
            return DECL_INCOMING_RTL (arg);
          }
      }

  return 0;
}

#endif
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in *OFFSET_PTR
   and *ARG_SIZE_PTR, respectively.

   IN_REGS is non-zero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
   list to be aligned to the specific boundary (in bits).  This rounding
   affects the initial and starting offsets, but not the argument size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */
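
/* Concrete case (added illustration, values hypothetical): with
   FUNCTION_ARG_BOUNDARY returning 64 and PARM_BOUNDARY == 32, a parm
   whose initial offset is 4 first has that offset rounded up to 8 (the
   first rounding, which moves the offset only); a 6-byte parm subject
   to padding then has its size rounded from 6 to 8 bytes (the second
   rounding, which grows the size only).  */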
/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
   initial_offset_ptr is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   initial_offset_ptr.  arg_size_ptr is always positive.  */

void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
                     initial_offset_ptr, offset_ptr, arg_size_ptr,
                     alignment_pad)
     enum machine_mode passed_mode;
     tree type;
     int in_regs;
     tree fndecl ATTRIBUTE_UNUSED;
     struct args_size *initial_offset_ptr;
     struct args_size *offset_ptr;
     struct args_size *arg_size_ptr;
     struct args_size *alignment_pad;
{
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);

#ifdef REG_PARM_STACK_SPACE
  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      int reg_parm_stack_space = 0;

#ifdef MAYBE_REG_PARM_STACK_SPACE
      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              size_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }
#endif /* REG_PARM_STACK_SPACE */
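
  /* E.g. (hypothetical target) with REG_PARM_STACK_SPACE == 16: a stack
     parm whose constant initial offset is 8 is pushed out to offset 16,
     past the area the callee may use to home its register parms.  */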
  arg_size_ptr->var = 0;
  arg_size_ptr->constant = 0;

#ifdef ARGS_GROW_DOWNWARD
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
                                    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = - initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }
  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
          || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
  SUB_PARM_SIZE (*offset_ptr, sizetree);
  if (where_pad != downward)
    pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
  if (initial_offset_ptr->var)
    {
      arg_size_ptr->var = size_binop (MINUS_EXPR,
                                      size_binop (MINUS_EXPR,
                                                  integer_zero_node,
                                                  initial_offset_ptr->var),
                                      offset_ptr->var);
    }
  else
    {
      arg_size_ptr->constant = (- initial_offset_ptr->constant
                                - offset_ptr->constant);
    }
#else /* !ARGS_GROW_DOWNWARD */
  pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
  *offset_ptr = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  if (where_pad == downward
      /* However, BLKmode args passed in regs have their padding done elsewhere.
         The stack slot must be able to hold the entire register.  */
      && !(in_regs && passed_mode == BLKmode))
    pad_below (offset_ptr, passed_mode, sizetree);

  if (where_pad != none
      && (TREE_CODE (sizetree) != INTEGER_CST
          || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
     struct args_size *offset_ptr;
     int boundary;
     struct args_size *alignment_pad;
{
  tree save_var;
  HOST_WIDE_INT save_constant;

  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
            round_down
#else
            round_up
#endif
              (ARGS_SIZE_TREE (*offset_ptr),
               boundary / BITS_PER_UNIT);
          offset_ptr->constant = 0; /*?*/
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
        }
      else
        {
          offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
            FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
            CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
            alignment_pad->constant = offset_ptr->constant - save_constant;
        }
    }
}
#ifndef ARGS_GROW_DOWNWARD
static void
pad_below (offset_ptr, passed_mode, sizetree)
     struct args_size *offset_ptr;
     enum machine_mode passed_mode;
     tree sizetree;
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          /* Add it in.  */
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree
round_down (value, divisor)
     tree value;
     int divisor;
{
  return size_binop (MULT_EXPR,
                     size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
                     size_int (divisor));
}
#endif
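
/* So, for instance, round_down (size_int (14), 8) yields 8: FLOOR_DIV
   gives 1, and multiplying back by the divisor produces the largest
   multiple of 8 not exceeding 14.  */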
/* Walk the tree of blocks describing the binding levels within a function
   and warn about uninitialized variables.
   This is done after calling flow_analysis and before global_alloc
   clobbers the pseudo-regs to hard regs.  */

void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (warn_uninitialized
          && TREE_CODE (decl) == VAR_DECL
          /* These warnings are unreliable for aggregates
             because assigning the fields one by one can fail to convince
             flow.c that the entire aggregate was initialized.
             Unions are troublesome because members may be shorter.  */
          && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
          && DECL_RTL (decl) != 0
          && GET_CODE (DECL_RTL (decl)) == REG
          /* Global optimizations can make it difficult to determine if a
             particular variable has been initialized.  However, a VAR_DECL
             with a nonzero DECL_INITIAL had an initializer, so do not
             claim it is potentially uninitialized.

             We do not care about the actual value in DECL_INITIAL, so we do
             not worry that it may be a dangling pointer.  */
          && DECL_INITIAL (decl) == NULL_TREE
          && regno_uninitialized (REGNO (DECL_RTL (decl))))
        warning_with_decl (decl,
                           "`%s' might be used uninitialized in this function");
      if (extra_warnings
          && TREE_CODE (decl) == VAR_DECL
          && DECL_RTL (decl) != 0
          && GET_CODE (DECL_RTL (decl)) == REG
          && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
        warning_with_decl (decl,
                           "variable `%s' might be clobbered by `longjmp' or `vfork'");
    }
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
/* Do the appropriate part of uninitialized_vars_warning
   but for arguments instead of local variables.  */

void
setjmp_args_warning ()
{
  register tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && GET_CODE (DECL_RTL (decl)) == REG
        && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
}
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */

void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
         || TREE_CODE (decl) == PARM_DECL)
        && DECL_RTL (decl) != 0
        && (GET_CODE (DECL_RTL (decl)) == REG
            || (GET_CODE (DECL_RTL (decl)) == MEM
                && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
        /* If this variable came from an inline function, it must be
           that its life doesn't overlap the setjmp.  If there was a
           setjmp in the function, it would already be in memory.  We
           must exclude such variable because their DECL_RTL might be
           set to strange things such as virtual_stack_vars_rtx.  */
        && ! DECL_FROM_INLINE (decl)
        && (
#ifdef NON_SAVING_SETJMP
            /* If longjmp doesn't restore the registers,
               don't put anything in them.  */
            NON_SAVING_SETJMP
            ||
#endif
            ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
/* Like the previous function, but for args instead of local variables.  */

void
setjmp_protect_args ()
{
  register tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
         || TREE_CODE (decl) == PARM_DECL)
        && DECL_RTL (decl) != 0
        && (GET_CODE (DECL_RTL (decl)) == REG
            || (GET_CODE (DECL_RTL (decl)) == MEM
                && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
        && (
            /* If longjmp doesn't restore the registers,
               don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
            NON_SAVING_SETJMP
            ||
#endif
            ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
}
/* Return the context-pointer register corresponding to DECL,
   or 0 if it does not need one.  */

rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  if (context == 0
      || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
    addr = XEXP (XEXP (addr, 0), 0);

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
         out of that function's stack frame.

         There are two cases:  If a separate ap is needed, allocate a
         slot in the outer function for it and dereference it that way.
         This is correct even if the real ap is actually a pseudo.
         Otherwise, just adjust the offset from the frame pointer to
         compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      if (fp->x_arg_pointer_save_area == 0)
        fp->x_arg_pointer_save_area
          = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
         avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
        if (TREE_PURPOSE (link) == context)
          {
            base = RTL_EXPR_RTL (TREE_VALUE (link));
            break;
          }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
        round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));

  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
        {
          tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
                                    function);
          return round_trampoline_addr (tramp);
        }

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl
      && fn_context != inline_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
        break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
                                fp ? fp : current_function);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      push_obstacks (fp->function_maybepermanent_obstack,
                     fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->x_trampoline_list = tree_cons (function, rtlexp,
                                         fp->x_trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
         trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();

      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
/* Given a trampoline address,
   round it to multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
                       GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
                       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
                        GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
                        temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
/* Insert the BLOCK in the block-tree before LAST_INSN.  */

void
retrofit_block (block, last_insn)
     tree block;
     rtx last_insn;
{
  rtx insn;

  /* Now insert the new BLOCK at the right place in the block trees
     for the function which called the inline function.  We just look
     backwards for a NOTE_INSN_BLOCK_{BEG,END}.  If we find the
     beginning of a block, then this new block becomes the first
     subblock of that block.  If we find the end of a block, then this
     new block follows that block in the list of blocks.  */
  for (insn = last_insn; insn; insn = PREV_INSN (insn))
    if (GET_CODE (insn) == NOTE
        && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
            || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
      break;
  if (!insn || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
    {
      tree superblock;

      if (insn)
        superblock = NOTE_BLOCK (insn);
      else
        superblock = DECL_INITIAL (current_function_decl);

      BLOCK_SUPERCONTEXT (block) = superblock;
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (superblock);
      BLOCK_SUBBLOCKS (superblock) = block;
    }
  else
    {
      tree prevblock = NOTE_BLOCK (insn);

      BLOCK_SUPERCONTEXT (block) = BLOCK_SUPERCONTEXT (prevblock);
      BLOCK_CHAIN (block) = BLOCK_CHAIN (prevblock);
      BLOCK_CHAIN (prevblock) = block;
    }
}
/* The functions identify_blocks and reorder_blocks provide a way to
   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
   duplicate portions of the RTL code.  Call identify_blocks before
   changing the RTL, and call reorder_blocks after.  */
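
/* Usage sketch (added; not a quotation of any caller):

       identify_blocks (DECL_INITIAL (fndecl), insns);
       ... run passes that reorder or duplicate insns ...
       DECL_INITIAL (fndecl) = reorder_blocks (DECL_INITIAL (fndecl), insns);

   The NOTE_BLOCK pointers recorded by identify_blocks are what allow
   reorder_blocks to rebuild a block tree matching the new insn order.  */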
/* Put all this function's BLOCK nodes including those that are chained
   onto the first block into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are BLOCK, the chain of top-level blocks of the function,
   and INSNS, the insn chain of the function.  */

void
identify_blocks (block, insns)
     tree block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  tree *block_stack;
  int depth = 0;
  int current_block_number = 1;
  rtx insn;

  if (block == 0)
    return;

  /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
     depth-first order.  */
  n_blocks = all_blocks (block, 0);
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  all_blocks (block, block_vector);

  block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
          {
            tree b;

            /* If there are more block notes than BLOCKs, something
               is badly wrong.  */
            if (current_block_number == n_blocks)
              abort ();

            b = block_vector[current_block_number++];
            NOTE_BLOCK (insn) = b;
            block_stack[depth++] = b;
          }
        else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
          {
            if (depth == 0)
              /* There are more NOTE_INSN_BLOCK_ENDs than
                 NOTE_INSN_BLOCK_BEGs.  Something is badly wrong.  */
              abort ();

            NOTE_BLOCK (insn) = block_stack[--depth];
          }
      }

  /* In whole-function mode, we might not have seen the whole function
     yet, so we might not use up all the blocks.  */
  if (n_blocks != current_block_number
      && !current_function->x_whole_function_mode_p)
    abort ();

  free (block_vector);
  free (block_stack);
}
/* Given a revised instruction chain, rebuild the tree structure of
   BLOCK nodes to correspond to the new order of RTL.  The new block
   tree is inserted below TOP_BLOCK.  Returns the current top-level
   block.  */

tree
reorder_blocks (block, insns)
     tree block;
     rtx insns;
{
  tree current_block = block;
  rtx insn;

  if (block == NULL_TREE)
    return NULL_TREE;

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;
  BLOCK_CHAIN (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
          {
            tree block = NOTE_BLOCK (insn);
            /* If we have seen this block before, copy it.  */
            if (TREE_ASM_WRITTEN (block))
              block = copy_node (block);
            BLOCK_SUBBLOCKS (block) = 0;
            TREE_ASM_WRITTEN (block) = 1;
            BLOCK_SUPERCONTEXT (block) = current_block;
            BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
            BLOCK_SUBBLOCKS (current_block) = block;
            current_block = block;
            NOTE_SOURCE_FILE (insn) = 0;
          }
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
          {
            BLOCK_SUBBLOCKS (current_block)
              = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
            current_block = BLOCK_SUPERCONTEXT (current_block);
            NOTE_SOURCE_FILE (insn) = 0;
          }
      }

  BLOCK_SUBBLOCKS (current_block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
  return current_block;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
/* Count the subblocks of the list starting with BLOCK, and list them
   all into the vector VECTOR.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Allocate a function structure and reset its contents to the defaults.  */

static void
prepare_function_start ()
{
  current_function = (struct function *) xcalloc (1, sizeof (struct function));

  init_stmt_for_function ();
  init_eh_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slots = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;
  nonlocal_goto_handler_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_varasm_status (current_function);

  /* Clear out data used for inlining.  */
  current_function->inlinable = 0;
  current_function->original_decl_initial = 0;
  current_function->original_arg_vector = 0;

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_contains_functions = 0;
  current_function_is_leaf = 0;
  current_function_sp_is_unchanging = 0;
  current_function_uses_only_leaf_regs = 0;
  current_function_has_computed_jump = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  if (init_lang_status)
    (*init_lang_status) (current_function);
  if (init_machine_status)
    (*init_machine_status) (current_function);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */

void
init_dummy_function_start ()
{
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  prepare_function_start ();

  /* Remember this function for later.  */
  current_function->next_global = all_functions;
  all_functions = current_function;

  current_function_name = (*decl_printable_name) (subr, 2);
  current_function->decl = subr;

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
}
/* Make sure all values used by the optimization passes have sane
   defaults.  */

void
init_function_for_compilation ()
{
  reg_renumber = 0;

  /* No prologue/epilogue insns yet.  */
  prologue = epilogue = 0;
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}
/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
                     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}

extern struct obstack permanent_obstack;
5921 /* Start the RTL for a new function, and set variables used for
5923 SUBR is the FUNCTION_DECL node.
5924 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5925 the function's parameters, which must be run at any return statement. */
void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
         conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
          || GET_CODE (static_chain_incoming_rtx) == REG)
        emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif
  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        {
          DECL_RTL (DECL_RESULT (subr))
            = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
          MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
                               AGGREGATE_TYPE_P (TREE_TYPE
                                                 (DECL_RESULT (subr))));
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
         compute the return values into a pseudo reg,
         which we will copy into the true return register
         after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
        {
          REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
          /* Needed because we may need to move this to memory
             in case it's a named return value whose address is taken.  */
          DECL_REGISTER (DECL_RESULT (subr)) = 1;
        }
    }
  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation copy the static chain
         pointer into a pseudo.  If we have small register classes, copy
         the value from memory if static_chain_incoming_rtx is a REG.  If
         we do stupid register allocation, we use the stack address
         generated above.  */
      if (tem && ! obey_regdecls)
        {
          /* If the static chain originally came in a register, put it back
             there, then move it out in the next insn.  The reason for
             this peculiar code is to satisfy function integration.  */
          if (SMALL_REGISTER_CLASSES
              && GET_CODE (static_chain_incoming_rtx) == REG)
            emit_move_insn (static_chain_incoming_rtx, last_ptr);
          last_ptr = copy_to_reg (static_chain_incoming_rtx);
        }

      while (tem)
        {
          tree rtlexp = make_node (RTL_EXPR);

          RTL_EXPR_RTL (rtlexp) = last_ptr;
          context_display = tree_cons (tem, rtlexp, context_display);
          tem = decl_function_context (tem);
          if (tem == 0)
            break;
          /* Chain thru stack frames, assuming pointer to next lexical frame
             is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
          last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
          last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
                                               memory_address (Pmode,
                                                               last_ptr)));

          /* If we are not optimizing, ensure that we know that this
             piece of context is live over the entire function.  */
          if (! optimize)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
                                                save_expr_regs);
        }
    }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
                   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
         side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
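/* Illustration only: the per-function protocol as a front end might drive
   it (FNDECL is a hypothetical FUNCTION_DECL; language hooks and error
   handling are omitted):

        init_function_start (fndecl, input_filename, lineno);
        expand_function_start (fndecl, 0);
        ... expand the statements of the function body ...
        expand_function_end (input_filename, lineno, 0);

   rest_of_compilation then optimizes and outputs the RTL emitted here.  */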
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end ()
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (current_function);
  free_after_compilation (current_function);
  free (current_function);
  current_function = 0;
}
/* Emit CODE for each register of the return value.  Useful values for
   code are USE and CLOBBER.  */

void
diddle_return_value (code)
     enum rtx_code code;
{
  tree decl_result = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl_result);

  if (return_reg)
    {
      if (GET_CODE (return_reg) == REG
          && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
        {
          /* Use hard_function_value to avoid creating a reference to a BLKmode
             register in the USE/CLOBBER insn.  */
          return_reg = hard_function_value (TREE_TYPE (decl_result),
                                            current_function_decl, 1);
          REG_FUNCTION_VALUE_P (return_reg) = 1;
          emit_insn (gen_rtx_fmt_e (code, VOIDmode, return_reg));
        }
      else if (GET_CODE (return_reg) == PARALLEL)
        {
          int i;

          for (i = 0; i < XVECLEN (return_reg, 0); i++)
            {
              rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);

              if (GET_CODE (x) == REG
                  && REGNO (x) < FIRST_PSEUDO_REGISTER)
                emit_insn (gen_rtx_fmt_e (code, VOIDmode, x));
            }
        }
    }
}
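/* Illustration only: a typical use of the routine above is the one made
   in expand_function_end below, clobbering the return registers just
   before the return label so that flow does not treat them as live on
   the path that merely drops through:

        diddle_return_value (CLOBBER);
        emit_label (return_label);
*/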
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  finish_expr_for_function ();

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
                      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }
  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();

          ggc_add_rtx_root (&initial_trampoline, 1);
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE),
                       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == CALL_INSN)
          {
            start_sequence ();
            probe_stack_range (STACK_CHECK_PROTECT,
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
            seq = get_insns ();
            end_sequence ();
            emit_insns_before (seq, tail_recursion_reentry);
            break;
          }
    }
  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;
  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    {
      /* Before the return label, clobber the return registers so that
         they are not propagated live to the rest of the function.  This
         can only happen with functions that drop through; if there had
         been a return statement, there would have either been a return
         rtx, or a jump to the return label.  */
      diddle_return_value (CLOBBER);

      emit_label (return_label);
    }
  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If there are any catch_clauses remaining, output them now.  */
    emit_insns (catch_clauses);
    catch_clauses = NULL_RTX;
    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
        label = gen_label_rtx ();
        last = emit_jump_insn_after (gen_jump (label), last);
        last = emit_barrier_after (last);
        emit_label (label);
      }
  }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
        fun = XEXP (fun, 0);
      else
        abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
                         fun, Pmode,
                         expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
                                                     0,
                                                     hard_frame_pointer_rtx),
                         Pmode);
    }
  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
         the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
        PUT_MODE (real_decl_result,
                  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
         holds the hard register containing the return value, not a
         temporary pseudo.  */
      current_function_return_rtx = real_decl_result;
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
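/* Illustration only: for a scalar value computed in a pseudo, the tail of
   the insn stream after the routine above looks roughly like

        note  NOTE_INSN_FUNCTION_END
        code_label  return_label
        insn  (set (reg:SI <hard return reg>) (reg:SI <pseudo>))
        insn  (use (reg:SI <hard return reg>))

   followed by a return insn if HAVE_return, else by a fall-through into
   the epilogue.  The exact shape depends on the target's FUNCTION_VALUE
   and FUNCTION_OUTGOING_VALUE macros.  */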
/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
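/* Illustration only: the vector returned above is zero-terminated, so for
   a three-insn prologue sequence it might contain

        { 42, 43, 44, 0 }

   which is why the scanners below can loop with `for (j = 0; vec[j]; j++)'.  */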
/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}

int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
    return 1;
  return 0;
}
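/* Illustration only: a pass that must leave prologue and epilogue insns
   alone can filter them while scanning, e.g.

        for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
          if (! prologue_epilogue_contains (insn))
            ... process INSN ...
*/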
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
        seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
         the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
        {
          /* Can't deal with multiple successors of the entry block.  */
          if (ENTRY_BLOCK_PTR->succ->succ_next)
            abort ();

          insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
          inserted = 1;
        }
      else
        emit_insn_after (seq, f);
    }
#endif

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
         but via mark_regs_live_at_end, we could use insert_insn_on_edge
         and all of this ugliness would go away.  */

      switch (optimize)
        {
        default:
          /* If the exit block has no non-fake predecessors, we don't
             need an epilogue.  Furthermore, only pay attention to the
             fallthru predecessors; if (conditional) return insns were
             generated, by definition we do not need to emit epilogue
             insns.  */

          for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
            if ((e->flags & EDGE_FAKE) == 0
                && (e->flags & EDGE_FALLTHRU) != 0)
              break;
          if (e == NULL)
            break;

          /* We can't handle multiple epilogues -- if one is needed,
             we won't be able to place it multiple times.

             ??? Fix epilogue expanders to not assume they are the
             last thing done compiling the function.  Either that
             or copy_rtx each insn.

             ??? Blah, it's not a simple expression to assert that
             we've exactly one fallthru exit edge.  */

          bb = e->src;
          tail = bb->end;

          /* ??? If the last insn of the basic block is a jump, then we
             are creating a new basic block.  Wimp out and leave these
             insns outside any block.  */
          if (GET_CODE (tail) == JUMP_INSN)
            bb = 0;

          /* FALLTHRU */
        case 0:
          {
            rtx prev, seq, first_use;

            /* Move the USE insns at the end of a function onto a list.  */
            prev = tail;
            if (GET_CODE (prev) == BARRIER
                || GET_CODE (prev) == NOTE)
              prev = prev_nonnote_insn (prev);

            first_use = 0;
            if (prev
                && GET_CODE (prev) == INSN
                && GET_CODE (PATTERN (prev)) == USE)
              {
                /* If the end of the block is the use, grab hold of something
                   else so that we emit barriers etc in the right place.  */
                if (prev == tail)
                  {
                    do
                      tail = PREV_INSN (tail);
                    while (GET_CODE (tail) == INSN
                           && GET_CODE (PATTERN (tail)) == USE);
                  }

                do
                  {
                    rtx use = prev;
                    prev = prev_nonnote_insn (prev);

                    remove_insn (use);
                    if (first_use)
                      {
                        NEXT_INSN (use) = first_use;
                        PREV_INSN (first_use) = use;
                      }
                    else
                      NEXT_INSN (use) = NULL_RTX;
                    first_use = use;
                  }
                while (prev
                       && GET_CODE (prev) == INSN
                       && GET_CODE (PATTERN (prev)) == USE);
              }

            /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
               epilogue insns, the USE insns at the end of a function,
               the jump insn that returns, and then a BARRIER.  */

            if (GET_CODE (tail) != BARRIER)
              {
                prev = next_nonnote_insn (tail);
                if (!prev || GET_CODE (prev) != BARRIER)
                  emit_barrier_after (tail);
              }

            seq = gen_epilogue ();
            prev = tail;
            tail = emit_jump_insn_after (seq, tail);

            /* Insert the USE insns immediately before the return insn, which
               must be the last instruction emitted in the sequence.  */
            if (first_use)
              emit_insns_before (first_use, tail);
            emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

            /* Update the tail of the basic block.  */
            if (bb)
              bb->end = tail;

            /* Retain a map of the epilogue insns.  */
            epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
          }
        }
    }
#endif

  if (inserted)
    commit_edge_insertions ();
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  rtx next;
                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = NEXT_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }

                  next = NEXT_INSN (note);

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (BLOCK_HEAD (0) == note)
                    BLOCK_HEAD (0) = next;

                  remove_insn (note);
                  add_insn_after (note, insn);
                }
            }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = PREV_INSN (note));)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }

                  /* Whether or not we can depend on BLOCK_HEAD,
                     attempt to keep it up-to-date.  */
                  if (n_basic_blocks
                      && BLOCK_HEAD (n_basic_blocks - 1) == insn)
                    BLOCK_HEAD (n_basic_blocks - 1) = note;

                  remove_insn (note);
                  add_insn_before (note, insn);
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Mark T for GC.  */

static void
mark_temp_slot (t)
     struct temp_slot *t;
{
  while (t)
    {
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);

      t = t->next;
    }
}
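/* Illustration only: each mark routine must cover every rtx or tree
   reachable from the structure it walks, or the garbage collector will
   free objects that are still in use.  A hypothetical new rtx field
   `foo' in struct temp_slot would need a matching line above:

        ggc_mark_rtx (t->foo);
*/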
/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
         i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);

  mark_temp_slot (p->x_temp_slots);

  {
    struct var_refs_queue *q = p->fixup_var_refs_queue;
    while (q)
      {
        ggc_mark_rtx (q->modified);
        q = q->next;
      }
  }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);
}
/* Mark the function chain ARG (which is really a struct function **)
   for GC.  */

static void
mark_function_chain (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  for (; f; f = f->next_global)
    {
      ggc_mark_tree (f->decl);

      mark_function_status (f);
      mark_eh_status (f->eh);
      mark_stmt_status (f->stmt);
      mark_expr_status (f->expr);
      mark_emit_status (f->emit);
      mark_varasm_status (f->varasm);

      if (mark_machine_status)
        (*mark_machine_status) (f);
      if (mark_lang_status)
        (*mark_lang_status) (f);

      if (f->original_arg_vector)
        ggc_mark_rtvec ((rtvec) f->original_arg_vector);
      if (f->original_decl_initial)
        ggc_mark_tree (f->original_decl_initial);
    }
}

/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&all_functions, 1, sizeof all_functions,
                mark_function_chain);
}
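/* Illustration only: ggc_add_root registers memory that must be scanned
   at collection time, with a callback to do the marking.  For a single
   rtx, the convenience form used for initial_trampoline in
   expand_function_end above suffices:

        static rtx my_root;
        ...
        ggc_add_rtx_root (&my_root, 1);

   where `my_root' is a hypothetical variable to be kept alive across
   collections.  */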