/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "integrate.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "cfglayout.h"
#include "tree-pass.h"
#include "bb-reorder.h"
/* So we can assign to cfun in this file.  */
#undef cfun
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
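/* For illustration (not part of the original sources): with a
   power-of-two alignment of 4, FLOOR_ROUND (-3, 4) yields -4 and
   CEIL_ROUND (5, 4) yields 8, so both macros behave correctly for
   negative values without using division.  */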
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;
/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;
/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;
/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);
/* The currently compiled function.  */
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	       /* Leave room for the fixed part of the frame.  */
	       - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     type.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
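/* Illustrative calls (hypothetical, shown only to demonstrate the
   ALIGN encoding described above): assign_stack_local_1 (SImode, 4, 0,
   ASLK_RECORD_PAD) aligns the slot according to SImode, while passing
   ALIGN of -1 would use BIGGEST_ALIGNMENT and round SIZE up to a
   multiple of it.  */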
rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with last parameter as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
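/* For example (illustrative only): push_temp_slots () enters level
   N+1; slots created there are freed again by the matching
   pop_temp_slots (), unless preserve_temp_slots moved them to level N
   first.  */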
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;

  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}
/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
		 remove_unused_temp_slot_addresses_1,
		 NULL);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  if (INTVAL (XEXP (x, 1)) >= p->base_offset
	      && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	    return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc_temp_slot ();
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}
      p->addr_taken = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
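/* For example (illustrative only): two free BLKmode slots at base
   offsets 0 and 16 with full sizes 16 and 8 are adjacent, so they
   merge into one 24-byte slot that later requests can reuse.  */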
static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;

	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }

	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 1;
	}

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	{
	  make_slot_available (p);
	  some_available = true;
	}
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (crtl->outgoing_args_size						      \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL)))				      \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)		      \
 + (STACK_POINTER_OFFSET))
#endif
#endif
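/* For instance (a sketch, assuming the default definitions above):
   with ACCUMULATE_OUTGOING_ARGS clear, STACK_DYNAMIC_OFFSET reduces to
   STACK_POINTER_OFFSET alone; with it set, the outgoing argument block
   (and possibly REG_PARM_STACK_SPACE) is added on top of that.  */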
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos; see expand_builtin_setjmp_receiver.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new_rtx, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is a valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
					     GEN_INT (offset), NULL_RTX,
					     1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn_and_edges (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;
	else if (DEBUG_INSN_P (insn))
	  for_each_rtx (&INSN_VAR_LOCATION (insn),
			instantiate_virtual_regs_in_rtx, NULL);
	else
	  instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (CALL_P (insn))
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */
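/* For example (illustrative only): a struct that fits no suitable
   call-clobbered return register, or any type the target's
   return_in_memory hook rejects, makes this predicate return 1 and so
   forces the caller to pass a hidden return pointer.  */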
int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	{
	  tree fndecl = get_callee_fndecl (fntype);
	  fntype = (fndecl
		    ? TREE_TYPE (fndecl)
		    : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
	}
	break;
      case FUNCTION_DECL:
	fntype = TREE_TYPE (fntype);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = NULL_TREE;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
	 types containing methods, otherwise the methods won't be callable
	 from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
	return false;
      break;
    default:
      break;
    }

  return true;
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static void
split_complex_args (VEC(tree, heap) **args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (tree, *args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          p = copy_node (p);
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);
          VEC_replace (tree, *args, i, p);

          /* Build a second synthetic decl.  */
          decl = build_decl (EXPR_LOCATION (p),
                             PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);
          VEC_safe_insert (tree, heap, *args, ++i, decl);
        }
    }
}
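
/* For illustration only (not part of the original sources): on a target
   whose split_complex_arg hook returns true for _Complex double, a
   declaration such as

       void f (_Complex double z);

   is rewritten by the loop above into two DFmode PARM_DECLs, one
   rewritten in place for the real part and one synthetic, nameless decl
   inserted after it for the imaginary part.  Any names one might show
   for the two halves would be purely illustrative; the second decl is
   built with NULL_TREE as its name.  */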
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static VEC(tree, heap) *
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  VEC(tree, heap) *fnargs = NULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    VEC_safe_push (tree, heap, fnargs, arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
                         PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      VEC_safe_insert (tree, heap, fnargs, 0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
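
/* For illustration only (not from the original sources): for a function
   returning a large aggregate through a hidden pointer, e.g.

       struct big { char buf[64]; };
       struct big f (int x);

   the code above effectively turns the parameter list into

       f (struct big *.result_ptr, int x)

   where ".result_ptr" is the artificial PARM_DECL inserted at slot 0,
   assuming the target's struct_value_rtx hook returned 0 so that the
   result address travels as an ordinary first argument.  */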
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
                                         TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
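
/* For illustration only (an assumed 32-bit target that promotes small
   integer arguments): for

       void f (short s);

   this routine would typically record

       nominal_mode  = HImode   (mode of TREE_TYPE (parm))
       passed_mode   = HImode   (mode of DECL_ARG_TYPE (parm))
       promoted_mode = SImode   (widened by promote_function_mode)

   whereas for an argument passed by invisible reference all three
   collapse to Pmode and passed_pointer is set.  The modes shown are
   target-dependent and given only as an example.  */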
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
                                                    data->promoted_mode,
                                                    data->passed_type,
                                                    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
        {
          rtx tem;
          tem = targetm.calls.function_incoming_arg (all->args_so_far,
                                                     data->promoted_mode,
                                                     data->passed_type, true);
          in_regs = tem != NULL;
        }
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
                                        data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
                                                 data->promoted_mode,
                                                 data->passed_type,
                                                 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
         register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
        {
          /* Part of this argument is passed in registers and part
             is passed on the stack.  Ask the prologue code to extend
             the stack part so that we can recreate the full value.

             PRETEND_BYTES is the size of the registers we need to store.
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
             stack space that the prologue should allocate.

             Internally, gcc assumes that the argument pointer is aligned
             to STACK_BOUNDARY bits.  This is used both for alignment
             optimizations (see init_emit) and to locate arguments that are
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
             a stack boundary.  */

          /* We assume at most one partial arg, and it must be the first
             argument on the stack.  */
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

          pretend_bytes = partial;
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

          /* We want to align relative to the actual stack pointer, so
             don't include this in the stack size until later.  */
          all->extra_pretend_bytes = all->pretend_args_size;
        }
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
                       entry_parm ? data->partial : 0, current_function_decl,
                       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
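
/* A worked example of the pretend-args rounding above, with illustrative
   numbers: if a partial argument leaves PRETEND_BYTES == 6 while
   STACK_BYTES == 8, then

       all->pretend_args_size = CEIL_ROUND (6, 8)
                              = (6 + 8 - 1) & ~(8 - 1)
                              = 13 & ~7
                              = 8

   so the prologue allocates a full 8-byte slot, preserving the
   STACK_BOUNDARY alignment invariant described in the comment.  */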
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
         while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
          && data->promoted_mode != DECL_MODE (parm))
        {
          set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
          if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
            {
              int offset = subreg_lowpart_offset (DECL_MODE (parm),
                                                  data->promoted_mode);
              if (offset)
                set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
            }
        }
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
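
/* The alignment guess above is the usual lowest-set-bit trick; a worked
   example with assumed values: for INTVAL (offset_rtx) == 12 bytes,
   BITS_PER_UNIT == 8 and boundary == 64,

       align = 12 * 8 | 64   = 96      (binary 1100000)
       align = align & -align = 32

   i.e. an offset of 12 bytes from a 64-bit-aligned base can only be
   relied upon to be 32-bit aligned.  */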
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
         locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        emit_group_store (validize_mem (stack_parm), entry_parm,
                          data->passed_type,
                          int_size_in_bytes (data->passed_type));
      else
        {
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
          move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
                               data->partial / UNITS_PER_WORD);
        }

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
           && data->nominal_mode != BLKmode
           && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                == data->passed_mode)
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
          {
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
            break;
          }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
                        GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
          || (data->nominal_type
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
           && data->nominal_mode != BLKmode
           && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
           && (flag_stack_protect == 2
               || data->passed_pointer
               || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
                         tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
                                       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
        PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
         So we have to be careful to ensure that we allocate an
         integral number of words.  We do this above when we call
         assign_stack_local if space was not allocated in the argument
         list.  If it was, this will not work if PARM_BOUNDARY is not
         a multiple of BITS_PER_WORD.  It isn't clear how to fix this
         if it becomes a problem.  Exception is when BLKmode arrives
         with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
        ;
      else if (GET_CODE (entry_parm) == PARALLEL)
        ;
      else
        gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        {
          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_group_store (mem, entry_parm, data->passed_type, size);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();
        }

      else if (size == 0)
        ;

      /* If SIZE is that of a mode no bigger than a word, just use
         that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
        {
          enum machine_mode mode
            = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

          if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
              && (size == UNITS_PER_WORD
                  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
              )
            {
              rtx reg;

              /* We are really truncating a word_mode value containing
                 SIZE bytes into a value of mode MODE.  If such an
                 operation requires no actual instructions, we can refer
                 to the value directly in mode MODE, otherwise we must
                 start with the register in word_mode and explicitly
                 convert it.  */
              if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
                reg = gen_rtx_REG (mode, REGNO (entry_parm));
              else
                {
                  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
                  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
                }
              emit_move_insn (change_address (mem, mode, 0), reg);
            }

          /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
             machine must be aligned to the left before storing
             to memory.  Note that the previous test doesn't
             handle all cases (e.g. SIZE == 3).  */
          else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                       == downward)
#else
                   && BYTES_BIG_ENDIAN
#endif
                   )
            {
              rtx tem, x;
              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
              rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

              x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
              tem = change_address (mem, word_mode, 0);
              emit_move_insn (tem, x);
            }
          else
            move_block_from_reg (REGNO (entry_parm), mem,
                                 size_stored / UNITS_PER_WORD);
        }
      else
        move_block_from_reg (REGNO (entry_parm), mem,
                             size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
                       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
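
/* A worked example of the left-alignment shift above, with assumed
   values: on a BYTES_BIG_ENDIAN target with UNITS_PER_WORD == 4, a
   3-byte BLKmode argument arriving in a register sits in the least
   significant bytes of the word, so

       by = (4 - 3) * BITS_PER_UNIT = 8

   and the expand_shift call moves the three payload bytes to the most
   significant end of the word before the word is stored to memory.  */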
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
                       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
                             TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (data->entry_parm);

  need_conversion = (data->nominal_mode != data->passed_mode
                     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
         mode, by the caller.  We now have to convert it to
         NOMINAL_MODE, if different.  However, PARMREG may be in
         a different mode than NOMINAL_MODE if it is being stored
         promoted.

         If ENTRY_PARM is a hard register, it might be in a register
         not valid for operating in its mode (e.g., an odd-numbered
         register for a DFmode).  In that case, moves are the only
         thing valid, so we can't do a convert from there.  This
         occurs when the calling sequence allow such misaligned
         usages.

         In addition, the conversion may involve a call, which could
         clobber parameters which haven't been copied to pseudo
         registers yet.

         First, we try to emit an insn which performs the necessary
         conversion.  We verify that this insn does not clobber any
         hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
                            unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
          && insn_operand_matches (icode, 0, op0)
          && insn_operand_matches (icode, 1, op1))
        {
          enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
          rtx insn, insns;
          HARD_REG_SET hardregs;

          start_sequence ();
          insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
                                  data->passed_mode, unsignedp);
          emit_insn (insn);
          insns = get_insns ();

          moved = true;
          CLEAR_HARD_REG_SET (hardregs);
          for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                note_stores (PATTERN (insn), record_hard_reg_sets,
                             &hardregs);
              if (!hard_reg_set_empty_p (hardregs))
                moved = false;
            }

          end_sequence ();

          if (moved)
            {
              emit_insn (insns);
              if (equiv_stack_parm != NULL_RTX)
                equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
                                                  equiv_stack_parm);
            }
        }
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
         generated appeared unsafe.  We must first copy the parm to a
         pseudo reg, and save the conversion until after all
         parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
          && GET_MODE (tempreg) == data->nominal_mode
          && REG_P (SUBREG_REG (tempreg))
          && data->nominal_mode == data->passed_mode
          && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
          && GET_MODE_SIZE (GET_MODE (tempreg))
             < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
        {
          /* The argument is already sign/zero extended, so note it
             into the subreg.  */
          SUBREG_PROMOTED_VAR_P (tempreg) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
        }

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, put it in one.  */
  if (data->passed_pointer
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
      /* If by-reference argument was promoted, demote it.  */
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
          || use_register_for_decl (parm)))
    {
      /* We can't use nominal_mode, because it will have been set to
         Pmode above.  We must use the actual mode of the parm.  */
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
      mark_user_reg (parmreg);

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
        {
          rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
          int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_move_insn (tempreg, DECL_RTL (parm));
          tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
          emit_move_insn (parmreg, tempreg);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();

          did_conversion = true;
        }
      else
        emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
         now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
                          XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
        {
          enum machine_mode submode
            = GET_MODE_INNER (GET_MODE (parmreg));
          int regnor = REGNO (XEXP (parmreg, 0));
          int regnoi = REGNO (XEXP (parmreg, 1));
          rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
          rtx stacki = adjust_address_nv (data->stack_parm, submode,
                                          GET_MODE_SIZE (submode));

          /* Scan backwards for the set of the real and
             imaginary parts.  */
          for (sinsn = linsn; sinsn != 0;
               sinsn = prev_nonnote_insn (sinsn))
            {
              set = single_set (sinsn);
              if (set == 0)
                continue;

              if (SET_DEST (set) == regno_reg_rtx [regnoi])
                set_unique_reg_note (sinsn, REG_EQUIV, stacki);
              else if (SET_DEST (set) == regno_reg_rtx [regnor])
                set_unique_reg_note (sinsn, REG_EQUIV, stackr);
            }
        }
      else
        set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
                      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
                         struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
                                          TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
        {
          int offset = subreg_lowpart_offset (data->nominal_mode,
                                              GET_MODE (data->stack_parm));
          /* ??? This may need a big-endian conversion on sparc64.  */
          data->stack_parm
            = adjust_address (data->stack_parm, data->nominal_mode, 0);
          if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
            set_mem_offset (data->stack_parm,
                            MEM_OFFSET (data->stack_parm) + offset);
        }
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
        {
          int align = STACK_SLOT_ALIGNMENT (data->passed_type,
                                            GET_MODE (data->entry_parm),
                                            TYPE_ALIGN (data->passed_type));
          data->stack_parm
            = assign_stack_local (GET_MODE (data->entry_parm),
                                  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
                                  align);
          set_mem_attributes (data->stack_parm, parm, 1);
        }

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
        {
          /* Use a block move to handle potentially misaligned entry_parm.  */
          if (!to_conversion)
            push_to_sequence2 (all->first_conversion_insn,
                               all->last_conversion_insn);
          to_conversion = true;

          emit_block_move (dest, src,
                           GEN_INT (int_size_in_bytes (data->passed_type)),
                           BLOCK_OP_NORMAL);
        }
      else
        emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
                              VEC(tree, heap) *fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
        {
          rtx tmp, real, imag;
          enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

          real = DECL_RTL (VEC_index (tree, fnargs, i));
          imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }

          if (TREE_ADDRESSABLE (parm))
            {
              rtx rmem, imem;
              HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
              int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
                                                DECL_MODE (parm),
                                                TYPE_ALIGN (TREE_TYPE (parm)));

              /* split_complex_arg put the real and imag parts in
                 pseudos.  Move them to memory.  */
              tmp = assign_stack_local (DECL_MODE (parm), size, align);
              set_mem_attributes (tmp, parm, 1);
              rmem = adjust_address_nv (tmp, inner, 0);
              imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
              push_to_sequence2 (all->first_conversion_insn,
                                 all->last_conversion_insn);
              emit_move_insn (rmem, real);
              emit_move_insn (imem, imag);
              all->first_conversion_insn = get_insns ();
              all->last_conversion_insn = get_last_insn ();
              end_sequence ();
            }
          else
            tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          SET_DECL_RTL (parm, tmp);

          real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
          imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }
          tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_decl_incoming_rtl (parm, tmp, false);
          i++;
        }
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  VEC(tree, heap) *fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
        {
          SET_DECL_RTL (parm, const0_rtx);
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          continue;
        }

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        {
          unsigned int align
            = targetm.calls.function_arg_boundary (data.promoted_mode,
                                                   data.passed_type);
          align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
                                     align);
          if (TYPE_ALIGN (data.nominal_type) > align)
            align = MINIMUM_ALIGNMENT (data.nominal_type,
                                       TYPE_MODE (data.nominal_type),
                                       TYPE_ALIGN (data.nominal_type));
          if (crtl->stack_alignment_estimated < align)
            {
              gcc_assert (!crtl->stack_realign_processed);
              crtl->stack_alignment_estimated = align;
            }
        }

      if (cfun->stdarg && !DECL_CHAIN (parm))
        assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
        {
          assign_parm_find_stack_rtl (parm, &data);
          assign_parm_adjust_entry_rtl (&data);
        }

      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
        {
          rtx incoming_rtl
            = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
                           data.entry_parm);
          set_decl_incoming_rtl (parm, incoming_rtl, true);
        }
      else
        set_decl_incoming_rtl (parm, data.entry_parm, false);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                          data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
        assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
        assign_parm_setup_reg (&all, parm, &data);
      else
        assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  VEC_free (tree, heap, fnargs);

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
        {
          tree type = TREE_TYPE (DECL_RESULT (fndecl));
          enum machine_mode mode = TYPE_MODE (type);

          if (mode != BLKmode
              && mode != VOIDmode
              && !AGGREGATE_TYPE_P (type))
            {
              unsigned int align = GET_MODE_ALIGNMENT (mode);
              if (crtl->stack_alignment_estimated < align)
                {
                  gcc_assert (!crtl->stack_realign_processed);
                  crtl->stack_alignment_estimated = align;
                }
            }
        }
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
        {
          SET_DECL_VALUE_EXPR (result, all.function_result_decl);
          x = addr;
        }
      else
        {
          SET_DECL_VALUE_EXPR (result,
                               build1 (INDIRECT_REF, TREE_TYPE (result),
                                       all.function_result_decl));
          addr = convert_memory_address (Pmode, addr);
          x = gen_rtx_MEM (DECL_MODE (result), addr);
          set_mem_attributes (x, result, 1);
        }

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
  crtl->args.size = MAX (crtl->args.size,
                         REG_PARM_STACK_SPACE (fndecl));
#endif

  crtl->args.size = CEIL_ROUND (crtl->args.size,
                                PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  crtl->args.arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
                                   size_int (-all.stack_args_size.constant)),
                      NULL_RTX, VOIDmode, EXPAND_NORMAL));
#else
  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
                                                         TREE_TYPE (fndecl),
                                                         crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl;

          real_decl_rtl
            = targetm.calls.function_value (TREE_TYPE (decl_result),
                                            fndecl, true);
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
          /* The delay slot scheduler assumes that crtl->return_rtx
             holds the hard register containing the return value, not a
             temporary pseudo.  */
          crtl->return_rtx = real_decl_rtl;
        }
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
        *walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
               && !TYPE_SIZES_GIMPLIFIED (t))
        {
          gimplify_type_sizes (t, (gimple_seq *) data);
          *walk_subtrees = 1;
        }
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  VEC(tree, heap) *fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
        continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                          data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
         SAVE_EXPRs (amongst others) onto a pending sizes list.  This
         turned out to be less than manageable in the gimple world.
         Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
                                    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
        {
          gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
        }

      if (data.passed_pointer)
        {
          tree type = TREE_TYPE (data.passed_type);
          if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
                                       type, data.named_arg))
            {
              tree local, t;

              /* For constant-sized objects, this is trivial; for
                 variable-sized objects, we have to play games.  */
              if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
                  && !(flag_stack_check == GENERIC_STACK_CHECK
                       && compare_tree_int (DECL_SIZE_UNIT (parm),
                                            STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  local = create_tmp_var (type, get_name (parm));
                  DECL_IGNORED_P (local) = 0;
                  /* If PARM was addressable, move that flag over
                     to the local copy, as its address will be taken,
                     not the PARMs.  Keep the parms address taken
                     as we'll query that flag during gimplification.  */
                  if (TREE_ADDRESSABLE (parm))
                    TREE_ADDRESSABLE (local) = 1;
                  else if (TREE_CODE (type) == COMPLEX_TYPE
                           || TREE_CODE (type) == VECTOR_TYPE)
                    DECL_GIMPLE_REG_P (local) = 1;
                }
              else
                {
                  tree ptr_type, addr;

                  ptr_type = build_pointer_type (type);
                  addr = create_tmp_reg (ptr_type, get_name (parm));
                  DECL_IGNORED_P (addr) = 0;
                  local = build_fold_indirect_ref (addr);

                  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
                  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
                                       size_int (DECL_ALIGN (parm)));
                  /* The call has been built for a variable-sized object.  */
                  CALL_ALLOCA_FOR_VAR_P (t) = 1;
                  t = fold_convert (ptr_type, t);
                  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
                  gimplify_and_add (t, &stmts);
                }

              gimplify_assign (local, parm, &stmts);

              SET_DECL_VALUE_EXPR (parm, local);
              DECL_HAS_VALUE_EXPR_P (parm) = 1;
            }
        }
    }

  VEC_free (tree, heap, fnargs);

  return stmts;
}
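
/* For illustration only (not from the original sources): on a target
   whose callee_copies hook returns true, a by-reference parameter PARM
   of variable-sized type is gimplified by the loop above into roughly

       addr.0 = __builtin_alloca_with_align (DECL_SIZE_UNIT (parm),
                                             DECL_ALIGN (parm));
       *addr.0 = *parm_ptr;

   after which DECL_VALUE_EXPR (parm) is *addr.0, so the function body
   operates on the callee's private copy.  The temporary name addr.0 and
   parm_ptr are purely illustrative.  */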
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
                     int partial, tree fndecl ATTRIBUTE_UNUSED,
                     struct args_size *initial_offset_ptr,
                     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary, round_boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              ssize_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
                                                              type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
         realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = boundary;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized
                          && crtl->stack_realign_needed);
            }
        }
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
                                          initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
        && (!host_integerp (sizetree, 1)
            || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
      s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
     )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
                          &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
                           - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
                                   size_binop (MINUS_EXPR,
                                               ssize_int (0),
                                               initial_offset_ptr->var),
                                   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
     )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
                          &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
          || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
    sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
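
/* A worked example of the two roundings described above, with assumed
   values and ARGS_GROW_DOWNWARD not defined: PARM_BOUNDARY == 32, a
   function_arg_boundary of 64 bits, an initial offset of 4 bytes, and a
   1-byte argument padded upward.  pad_to_arg_alignment first rounds the
   4-byte offset up to 8 (boundary rounding; the 4 bytes of padding are
   recorded in locate->alignment_pad), so locate->slot_offset and
   locate->offset both become 8; round_up then rounds sizetree from 1
   byte up to 4 (PARM_BOUNDARY rounding, which affects only the size).
   The numbers are illustrative, not from any particular target.  */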
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
                      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          tree sp_offset_tree = ssize_int (sp_offset);
          tree offset = size_binop (PLUS_EXPR,
                                    ARGS_SIZE_TREE (*offset_ptr),
                                    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
          tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
          tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

          offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
          /* ARGS_SIZE_TREE includes constant term.  */
          offset_ptr->constant = 0;
          if (boundary > PARM_BOUNDARY)
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
                                             save_var);
        }
      else
        {
          offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
            FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
            CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
          if (boundary > PARM_BOUNDARY)
            alignment_pad->constant = offset_ptr->constant - save_constant;
        }
    }
}
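
/* A worked example of the constant case above, with assumed values:
   STACK_POINTER_OFFSET == 0, offset_ptr->constant == 13 and a 64-bit
   boundary, i.e. boundary_in_bytes == 8:

       offset_ptr->constant = -0 + CEIL_ROUND (13 + 0, 8) = 16

   and, when boundary > PARM_BOUNDARY, the 3 bytes of padding are
   recorded in alignment_pad->constant.  */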
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode,
           tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          /* Add it in.  */
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
           || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
          && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow_analysis and before register
   allocation since that will clobber the pseudo-regs to hard
   regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
          && DECL_RTL_SET_P (decl)
          && REG_P (DECL_RTL (decl))
          && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
        warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
                 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (setjmp_crosses,
                                      REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
               "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
               decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
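
/* A hypothetical example of the code these warnings target (added
   comment; `first', `second' and `g' are placeholder names):

     jmp_buf env;
     int f (void)
     {
       int v = first ();    // V is live across the setjmp call
       if (setjmp (env) == 0)
         {
           v = second ();   // V is set more than once ...
           g ();            // ... and g may call longjmp (env, 1)
         }
       return v;            // V might be clobbered by the longjmp
     }

   If V is allocated to a call-saved register, longjmp may restore that
   register to its value at setjmp time, losing the later store.  */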
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
          || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
              != prev_super))
        BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
          && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
        {
          BLOCK_FRAGMENT_CHAIN (block)
            = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
          if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
            BLOCK_SAME_RANGE (block) = 0;
        }
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
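
/* For illustration (added comment): given the chain A -> B -> C linked
   through BLOCK_CHAIN, blocks_nreverse_all rewrites the links in place
   to C -> B -> A and returns C, recursing into each block's subblocks
   and fragment chains along the way.  No new nodes are allocated; only
   the chain pointers are redirected.  */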
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *block_stack;

  if (block == NULL_TREE)
    return;

  block_stack = VEC_alloc (tree, heap, 10);

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));

  VEC_free (tree, heap, block_stack);
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
{
  rtx insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
        {
          if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
            {
              tree block = NOTE_BLOCK (insn);
              tree origin;

              gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
              origin = block;

              if (prev_end)
                BLOCK_SAME_RANGE (prev_end) = 0;
              prev_end = NULL_TREE;

              /* If we have seen this block before, that means it now
                 spans multiple address regions.  Create a new fragment.  */
              if (TREE_ASM_WRITTEN (block))
                {
                  tree new_block = copy_node (block);

                  BLOCK_SAME_RANGE (new_block) = 0;
                  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
                  BLOCK_FRAGMENT_CHAIN (new_block)
                    = BLOCK_FRAGMENT_CHAIN (origin);
                  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

                  NOTE_BLOCK (insn) = new_block;
                  block = new_block;
                }

              if (prev_beg == current_block && prev_beg)
                BLOCK_SAME_RANGE (block) = 1;

              prev_beg = origin;

              BLOCK_SUBBLOCKS (block) = 0;
              TREE_ASM_WRITTEN (block) = 1;
              /* When there's only one block for the entire function,
                 current_block == block and we mustn't do this, it
                 will cause infinite recursion.  */
              if (block != current_block)
                {
                  tree super;
                  if (block != origin)
                    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
                                || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
                                                                   (origin))
                                   == current_block);
                  if (VEC_empty (tree, *p_block_stack))
                    super = current_block;
                  else
                    {
                      super = VEC_last (tree, *p_block_stack);
                      gcc_assert (super == current_block
                                  || BLOCK_FRAGMENT_ORIGIN (super)
                                     == current_block);
                    }
                  BLOCK_SUPERCONTEXT (block) = super;
                  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
                  BLOCK_SUBBLOCKS (current_block) = block;
                  current_block = origin;
                }
              VEC_safe_push (tree, heap, *p_block_stack, block);
            }
          else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
            {
              NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
              current_block = BLOCK_SUPERCONTEXT (current_block);
              if (BLOCK_FRAGMENT_ORIGIN (current_block))
                current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
              prev_beg = NULL_TREE;
              prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
                         ? NOTE_BLOCK (insn) : NULL_TREE;
            }
        }
      else if (!DEBUG_INSN_P (insn))
        {
          prev_beg = NULL_TREE;
          if (prev_end)
            BLOCK_SAME_RANGE (prev_end) = 0;
          prev_end = NULL_TREE;
        }
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
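
/* Usage sketch (added comment): block_chainon (a, b) walks to the last
   block of chain A and makes its BLOCK_CHAIN point at B, returning A.
   Because a null operand simply yields the other operand, callers can
   accumulate chains incrementally:

     chain = block_chainon (chain, new_blocks);

   The ENABLE_TREE_CHECKING loop above guards against accidentally
   creating a cycle by chaining a list onto itself.  */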
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
                   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
                   : optimization_default_node);

      if (!opts)
        opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
        {
          optimization_current_node = opts;
          cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
        }

      targetm.set_current_function (fndecl);
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static VEC(function_p,heap) *cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  */

void
push_cfun (struct function *new_cfun)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  set_cfun (new_cfun);
}

/* Pop cfun from the stack.  */

void
pop_cfun (void)
{
  struct function *new_cfun = VEC_pop (function_p, cfun_stack);
  set_cfun (new_cfun);
}
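
/* Typical usage (added comment): passes that need to emit or inspect
   RTL for another function temporarily switch contexts with

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... work with cfun and crtl for FNDECL ...
     pop_cfun ();

   so that cfun, and the target's notion of the current function, are
   restored on exit.  */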
/* Return value of funcdef and increase it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return value of funcdef.  */
int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared_function ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();

      result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          cfun->returns_pcc_struct = 1;
#endif
          cfun->returns_struct = 1;
        }

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
         but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_alloc_cleared_stack_usage ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

static void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);
  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
void
expand_main_function (void)
{
#if (defined(INVOKE__main) \
     || (!defined(HAS_INIT_SECTION) \
         && !defined(INIT_SECTION_ASM_OP) \
         && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set                0
# define gen_stack_protect_set(x,y)    (gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
        {
          emit_insn (insn);
          return;
        }
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
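
/* Roughly speaking (added comment), the RTL emitted here behaves like
   the C statement

     frame_canary = __stack_chk_guard;

   where __stack_chk_guard is the conventional guard symbol on glibc
   targets; the actual guard location is whatever the target's
   stack_protect_guard hook returns.  The optional stack_protect_set
   pattern lets a target perform the copy without exposing the guard
   value in a scratch register.  */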
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test               0
# define gen_stack_protect_test(x, y, z)       (gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
        {
          emit_insn (tmp);
          break;
        }
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing ought
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
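
/* The emitted check is morally equivalent to (added comment):

     if (frame_canary != __stack_chk_guard)
       __stack_chk_fail ();

   except that the comparison may use the target's stack_protect_test
   pattern so the guard value never leaks into a register, and the
   failure call is whatever targetm.stack_protect_fail expands to.  */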
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (sv)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, sv);
            }
        }
      if (value_address)
        {
          rtx x = value_address;
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
            {
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
              set_mem_attributes (x, DECL_RESULT (subr), 1);
            }
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
         into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
          && targetm.calls.return_in_msb (return_type))
        /* expand_function_end will insert the appropriate padding in
           this case.  Use the return value's natural (unpadded) mode
           within the function proper.  */
        SET_DECL_RTL (DECL_RESULT (subr),
                      gen_reg_rtx (TYPE_MODE (return_type)));
      else
        {
          /* In order to figure out what mode to use for the pseudo, we
             figure out what the mode of the eventual return register will
             actually be, and use that.  */
          rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

          /* Structures that are returned in registers are not
             aggregate_value_p, so we may see a PARALLEL or a REG.  */
          if (REG_P (hard_reg))
            SET_DECL_RTL (DECL_RESULT (subr),
                          gen_reg_rtx (GET_MODE (hard_reg)));
          else
            {
              gcc_assert (GET_CODE (hard_reg) == PARALLEL);
              SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
            }
        }

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
         result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain, insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
          && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
        set_dst_reg_note (insn, REG_EQUIV, chain, local);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
         before the frame variable gets declared.  Help out...  */
      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      if (!DECL_RTL_SET_P (var))
        expand_decl (var);

      t_save = build4 (ARRAY_REF,
                       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                       cfun->nonlocal_goto_save_area,
                       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = DECL_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
        && !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}

static GTY(()) rtx initial_trampoline;
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (CALL_P (insn))
          {
            rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
            start_sequence ();
            if (STACK_CHECK_MOVING_SP)
              anti_adjust_stack_and_probe (max_frame_size, true);
            else
              probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
            seq = get_insns ();
            end_sequence ();
            set_insn_locators (seq, prologue_locator);
            emit_insn_before (seq, stack_check_probe_note);
            break;
          }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  set_curr_insn_source_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
         the function context for sjlj exceptions.  */
      if (flag_exceptions)
        sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
         moved into the epilogue by scheduling, because we don't
         always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
        emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl = crtl->return_rtx;

          /* This should be set in assign_parms.  */
          gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

          /* If this is a BLKmode structure being returned in registers,
             then use the mode computed in expand_return.  Note that if
             decl_rtl is memory, then its mode may have been changed,
             but that crtl->return_rtx has not.  */
          if (GET_MODE (real_decl_rtl) == BLKmode)
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

          /* If a non-BLKmode return value should be padded at the least
             significant end of the register, shift it left by the appropriate
             amount.  BLKmode results are handled using the group load/store
             machinery.  */
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
            {
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
                                           REGNO (real_decl_rtl)),
                              decl_rtl);
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
            }
          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             extension.  */
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
              promote_function_mode (TREE_TYPE (decl_result),
                                     GET_MODE (decl_rtl), &unsignedp,
                                     TREE_TYPE (current_function_decl), 1);

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
            {
              /* If expand_function_start has created a PARALLEL for decl_rtl,
                 move the result to the real return registers.  Otherwise, do
                 a group load from decl_rtl for a named return.  */
              if (GET_CODE (decl_rtl) == PARALLEL)
                emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
                                 TREE_TYPE (decl_result),
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
            }
          /* In the case of complex integer modes smaller than a word, we'll
             need to generate some non-trivial bitfield insertions.  Do that
             on a pseudo and not the hard register.  */
          else if (GET_CODE (decl_rtl) == CONCAT
                   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
                   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
            {
              int old_generating_concat_p;
              rtx tmp;

              old_generating_concat_p = generating_concat_p;
              generating_concat_p = 0;
              tmp = gen_reg_rtx (GET_MODE (decl_rtl));
              generating_concat_p = old_generating_concat_p;

              emit_move_insn (tmp, decl_rtl);
              emit_move_insn (real_decl_rtl, tmp);
            }
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
        }
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
        type = TREE_TYPE (type);
      else
        value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
                                               current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
                                              value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
         of the result.  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0, seq;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret),
                      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx insns, rtx end, htab_t *hashp)
{
  rtx tmp;
  htab_t hash = *hashp;

  if (hash == NULL)
    *hashp = hash
      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      void **slot = htab_find_slot (hash, tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
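
/* Usage note (added comment): the prologue and epilogue emitters in
   this file call, for example,

     record_insns (seq, NULL, &prologue_insn_hash);

   so that prologue_epilogue_contains below can later recognize those
   insns, or recorded copies of them, by pointer identity.  */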
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  htab_t hash;
  void **slot;

  hash = epilogue_insn_hash;
  if (!hash || !htab_find (hash, insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !htab_find (hash, insn))
        return;
    }

  slot = htab_find_slot (hash, copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Set the locator of the insn chain starting at INSN to LOC.  */
static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
        INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, htab_t hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int i;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
          return true;
      return false;
    }

  return htab_find (hash, insn) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
#ifdef HAVE_simple_return

/* Return true if INSN requires the stack frame to be set up.
   PROLOGUE_USED contains the hard registers used in the function
   prologue.  SET_UP_BY_PROLOGUE is the set of registers we expect the
   prologue to set up for the function.  */
static bool
requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
                        HARD_REG_SET set_up_by_prologue)
{
  df_ref *df_rec;
  HARD_REG_SET hardregs;
  unsigned regno;

  if (CALL_P (insn))
    return !SIBLING_CALL_P (insn);

  CLEAR_HARD_REG_SET (hardregs);
  for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
    {
      rtx dreg = DF_REF_REG (*df_rec);

      if (!REG_P (dreg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
                           REGNO (dreg));
    }
  if (hard_reg_set_intersect_p (hardregs, prologue_used))
    return true;
  AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (hardregs, regno)
        && df_regs_ever_live_p (regno))
      return true;

  for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
    {
      rtx reg = DF_REF_REG (*df_rec);

      if (!REG_P (reg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (reg),
                           REGNO (reg));
    }
  if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
    return true;

  return false;
}
/* See whether BB has a single successor that uses [REGNO, END_REGNO),
   and if BB is its only predecessor.  Return that block if so,
   otherwise return null.  */

static basic_block
next_block_for_reg (basic_block bb, int regno, int end_regno)
{
  edge e, live_edge;
  edge_iterator ei;
  bitmap live;
  int i;

  live_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      live = df_get_live_in (e->dest);
      for (i = regno; i < end_regno; i++)
        if (REGNO_REG_SET_P (live, i))
          {
            if (live_edge && live_edge != e)
              return NULL;
            live_edge = e;
          }
    }

  /* We can sometimes encounter dead code.  Don't try to move it
     into the exit block.  */
  if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
    return NULL;

  /* Reject targets of abnormal edges.  This is needed for correctness
     on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
     exception edges even though it is generally treated as call-saved
     for the majority of the compilation.  Moving across abnormal edges
     isn't going to be interesting for shrink-wrap usage anyway.  */
  if (live_edge->flags & EDGE_ABNORMAL)
    return NULL;

  if (EDGE_COUNT (live_edge->dest->preds) > 1)
    return NULL;

  return live_edge->dest;
}
/* Try to move INSN from BB to a successor.  Return true on success.
   USES and DEFS are the set of registers that are used and defined
   after INSN in BB.  */

static bool
move_insn_for_shrink_wrap (basic_block bb, rtx insn,
                           const HARD_REG_SET uses,
                           const HARD_REG_SET defs)
{
  rtx set, src, dest;
  bitmap live_out, live_in, bb_uses, bb_defs;
  unsigned int i, dregno, end_dregno, sregno, end_sregno;
  basic_block next_block;

  /* Look for a simple register copy.  */
  set = single_set (insn);
  if (!set)
    return false;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (!REG_P (dest) || !REG_P (src))
    return false;

  /* Make sure that the source register isn't defined later in BB.  */
  sregno = REGNO (src);
  end_sregno = END_REGNO (src);
  if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
    return false;

  /* Make sure that the destination register isn't referenced later in BB.  */
  dregno = REGNO (dest);
  end_dregno = END_REGNO (dest);
  if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
      || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
    return false;

  /* See whether there is a successor block to which we could move INSN.  */
  next_block = next_block_for_reg (bb, dregno, end_dregno);
  if (!next_block)
    return false;

  /* At this point we are committed to moving INSN, but let's try to
     move it as far as we can.  */
  do
    {
      live_out = df_get_live_out (bb);
      live_in = df_get_live_in (next_block);
      bb = next_block;

      /* Check whether BB uses DEST or clobbers DEST.  We need to add
         INSN to BB if so.  Either way, DEST is no longer live on entry,
         except for any part that overlaps SRC (next loop).  */
      bb_uses = &DF_LR_BB_INFO (bb)->use;
      bb_defs = &DF_LR_BB_INFO (bb)->def;
      for (i = dregno; i < end_dregno; i++)
        {
          if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i))
            next_block = NULL;
          CLEAR_REGNO_REG_SET (live_out, i);
          CLEAR_REGNO_REG_SET (live_in, i);
        }

      /* Check whether BB clobbers SRC.  We need to add INSN to BB if so.
         Either way, SRC is now live on entry.  */
      for (i = sregno; i < end_sregno; i++)
        {
          if (REGNO_REG_SET_P (bb_defs, i))
            next_block = NULL;
          SET_REGNO_REG_SET (live_out, i);
          SET_REGNO_REG_SET (live_in, i);
        }

      /* If we don't need to add the move to BB, look for a single
         successor block.  */
      if (next_block)
        next_block = next_block_for_reg (next_block, dregno, end_dregno);
    }
  while (next_block);

  /* BB now defines DEST.  It only uses the parts of DEST that overlap SRC
     (next loop).  */
  for (i = dregno; i < end_dregno; i++)
    {
      CLEAR_REGNO_REG_SET (bb_uses, i);
      SET_REGNO_REG_SET (bb_defs, i);
    }

  /* BB now uses SRC.  */
  for (i = sregno; i < end_sregno; i++)
    SET_REGNO_REG_SET (bb_uses, i);

  emit_insn_after (PATTERN (insn), bb_note (bb));
  delete_insn (insn);
  return true;
}
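
/* Illustrative scenario (added comment; register names are hypothetical):
   if the entry block contains the copy

     (set (reg:DI bx) (reg:DI di))   ; save incoming arg in a call-saved reg

   and the call-saved register is only ever used on the one path that
   makes a call, the copy is sunk into that successor block.  The entry
   block then no longer touches a call-saved register, so it may no
   longer require the prologue.  */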
/* Look for register copies in the first block of the function, and move
   them down into successor blocks if the register is used only on one
   path.  This exposes more opportunities for shrink-wrapping.  These
   kinds of sets often occur when incoming argument registers are moved
   to call-saved registers because their values are live across one or
   more calls during the function.  */

static void
prepare_shrink_wrap (basic_block entry_block)
{
  rtx insn, curr, x;
  HARD_REG_SET uses, defs;
  df_ref *ref;

  CLEAR_HARD_REG_SET (uses);
  CLEAR_HARD_REG_SET (defs);
  FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
    if (NONDEBUG_INSN_P (insn)
        && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
      {
        /* Add all defined registers to DEFs.  */
        for (ref = DF_INSN_DEFS (insn); *ref; ref++)
          {
            x = DF_REF_REG (*ref);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (defs, REGNO (x));
          }

        /* Add all used registers to USESs.  */
        for (ref = DF_INSN_USES (insn); *ref; ref++)
          {
            x = DF_REF_REG (*ref);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (uses, REGNO (x));
          }
      }
}

#endif
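
/* Illustrative CFG (added comment; `heavy' is a placeholder name): for
   a function shaped like

     int f (int x)
     {
       if (x == 0)
         return 0;        // fast path: no frame needed
       return heavy (x);  // slow path: needs the prologue
     }

   the fast-path blocks neither call anything nor touch call-saved
   registers, so the prologue can be sunk onto the edge leading to the
   slow path, and the fast path runs without setting up a frame.  */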
/* Insert use of return register before the end of BB.  */

static void
emit_use_return_register_into_block (basic_block bb)
{
  rtx seq;
  start_sequence ();
  use_return_register ();
  seq = get_insns ();
  end_sequence ();
  emit_insn_before (seq, BB_END (bb));
}
/* Create a return pattern, either simple_return or return, depending on
   simple_p.  */

static rtx
gen_return_pattern (bool simple_p)
{
#ifdef HAVE_simple_return
  return simple_p ? gen_simple_return () : gen_return ();
#else
  gcc_assert (!simple_p);
  return gen_return ();
#endif
}
/* Insert an appropriate return pattern at the end of block BB.  This
   also means updating block_for_insn appropriately.  SIMPLE_P is
   the same as in gen_return_pattern and passed to it.  */

static void
emit_return_into_block (bool simple_p, basic_block bb)
{
  rtx jump, pat;
  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
  pat = PATTERN (jump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  gcc_assert (ANY_RETURN_P (pat));
  JUMP_LABEL (jump) = pat;
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
#ifdef HAVE_simple_return
/* Create a copy of BB instructions and insert at BEFORE.  Redirect
   preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE.  */
static void
dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
                        bitmap_head *need_prologue)
{
  edge_iterator ei;
  edge e;
  rtx insn = BB_END (bb);

  /* We know BB has a single successor, so there is no need to copy a
     simple jump at the end of BB.  */
  if (simplejump_p (insn))
    insn = PREV_INSN (insn);

  start_sequence ();
  duplicate_insn_chain (BB_HEAD (bb), insn);
  if (dump_file)
    {
      unsigned count = 0;
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          ++count;
      fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
               bb->index, copy_bb->index, count);
    }
  insn = get_insns ();
  end_sequence ();
  emit_insn_before (insn, before);

  /* Redirect all the paths that need no prologue into copy_bb.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    if (!bitmap_bit_p (need_prologue, e->src->index))
      {
        redirect_edge_and_branch_force (e, copy_bb);
        continue;
      }
    else
      ei_next (&ei);
}
#endif
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL.  */
static bool
active_insn_between (rtx head, rtx tail)
{
  while (tail)
    {
      if (active_insn_p (tail))
        return true;
      if (tail == head)
        return false;
      tail = PREV_INSN (tail);
    }
  return false;
}
/* LAST_BB is a block that exits, and empty of active instructions.
   Examine its predecessors for jumps that can be converted to
   (conditional) returns.  */
static VEC (edge, heap) *
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
                          VEC (edge, heap) *unconverted ATTRIBUTE_UNUSED)
{
  int i;
  basic_block bb;
  rtx label;
  edge_iterator ei;
  edge e;
  VEC(basic_block,heap) *src_bbs;

  src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds));
  FOR_EACH_EDGE (e, ei, last_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR)
      VEC_quick_push (basic_block, src_bbs, e->src);

  label = BB_HEAD (last_bb);

  FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb)
    {
      rtx jump = BB_END (bb);

      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
        continue;

      e = find_edge (bb, last_bb);

      /* If we have an unconditional jump, we can replace that
         with a simple return instruction.  */
      if (simplejump_p (jump))
        {
          /* The use of the return register might be present in the exit
             fallthru block.  Either:
             - removing the use is safe, and we should remove the use in
             the exit fallthru block, or
             - removing the use is not safe, and we should add it here.
             For now, we conservatively choose the latter.  Either of the
             2 helps in crossjumping.  */
          emit_use_return_register_into_block (bb);

          emit_return_into_block (simple_p, bb);
          delete_insn (jump);
        }

      /* If we have a conditional jump branching to the last
         block, we can try to replace that with a conditional
         return instruction.  */
      else if (condjump_p (jump))
        {
          rtx dest;

          if (simple_p)
            dest = simple_return_rtx;
          else
            dest = ret_rtx;
          if (!redirect_jump (jump, dest, 0))
            {
#ifdef HAVE_simple_return
              if (simple_p)
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Failed to redirect bb %d branch.\n", bb->index);
                  VEC_safe_push (edge, heap, unconverted, e);
                }
#endif
              continue;
            }

          /* See comment in simplejump_p case above.  */
          emit_use_return_register_into_block (bb);

          /* If this block has only one successor, it both jumps
             and falls through to the fallthru block, so we can't
             delete the edge.  */
          if (single_succ_p (bb))
            continue;
        }
      else
        {
#ifdef HAVE_simple_return
          if (simple_p)
            {
              if (dump_file)
                fprintf (dump_file,
                         "Failed to redirect bb %d branch.\n", bb->index);
              VEC_safe_push (edge, heap, unconverted, e);
            }
#endif
          continue;
        }

      /* Fix up the CFG for the successful change we just made.  */
      redirect_edge_succ (e, EXIT_BLOCK_PTR);
      e->flags &= ~EDGE_CROSSING;
    }
  VEC_free (basic_block, heap, src_bbs);
  return unconverted;
}
/* Emit a return insn for the exit fallthru block.  */
static basic_block
emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
{
  basic_block last_bb = exit_fallthru_edge->src;

  if (JUMP_P (BB_END (last_bb)))
    {
      last_bb = split_edge (exit_fallthru_edge);
      exit_fallthru_edge = single_succ_edge (last_bb);
    }
  emit_barrier_after (BB_END (last_bb));
  emit_return_into_block (simple_p, last_bb);
  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  return last_bb;
}
#endif
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
5824 thread_prologue_and_epilogue_insns (void)
5827 #ifdef HAVE_simple_return
5828 VEC (edge
, heap
) *unconverted_simple_returns
= NULL
;
5829 bool nonempty_prologue
;
5830 bitmap_head bb_flags
;
5831 unsigned max_grow_size
;
5834 rtx seq ATTRIBUTE_UNUSED
, epilogue_end ATTRIBUTE_UNUSED
;
5835 rtx prologue_seq ATTRIBUTE_UNUSED
, split_prologue_seq ATTRIBUTE_UNUSED
;
5836 edge e
, entry_edge
, orig_entry_edge
, exit_fallthru_edge
;
5841 rtl_profile_for_bb (ENTRY_BLOCK_PTR
);
5845 epilogue_end
= NULL_RTX
;
5846 returnjump
= NULL_RTX
;
5848 /* Can't deal with multiple successors of the entry block at the
5849 moment. Function should always have at least one entry
5851 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR
));
5852 entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR
);
5853 orig_entry_edge
= entry_edge
;
5855 split_prologue_seq
= NULL_RTX
;
5856 if (flag_split_stack
5857 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun
->decl
))
5860 #ifndef HAVE_split_stack_prologue
5863 gcc_assert (HAVE_split_stack_prologue
);
5866 emit_insn (gen_split_stack_prologue ());
5867 split_prologue_seq
= get_insns ();
5870 record_insns (split_prologue_seq
, NULL
, &prologue_insn_hash
);
5871 set_insn_locators (split_prologue_seq
, prologue_locator
);
5875 prologue_seq
= NULL_RTX
;
5876 #ifdef HAVE_prologue
5880 seq
= gen_prologue ();
5883 /* Insert an explicit USE for the frame pointer
5884 if the profiling is on and the frame pointer is required. */
5885 if (crtl
->profile
&& frame_pointer_needed
)
5886 emit_use (hard_frame_pointer_rtx
);
5888 /* Retain a map of the prologue insns. */
5889 record_insns (seq
, NULL
, &prologue_insn_hash
);
5890 emit_note (NOTE_INSN_PROLOGUE_END
);
5892 /* Ensure that instructions are not moved into the prologue when
5893 profiling is on. The call to the profiling routine can be
5894 emitted within the live range of a call-clobbered register. */
5895 if (!targetm
.profile_before_prologue () && crtl
->profile
)
5896 emit_insn (gen_blockage ());
5898 prologue_seq
= get_insns ();
5900 set_insn_locators (prologue_seq
, prologue_locator
);
5904 #ifdef HAVE_simple_return
5905 bitmap_initialize (&bb_flags
, &bitmap_default_obstack
);
5907 /* Try to perform a kind of shrink-wrapping, making sure the
5908 prologue/epilogue is emitted only around those parts of the
5909 function that require it. */
5911 nonempty_prologue
= false;
5912 for (seq
= prologue_seq
; seq
; seq
= NEXT_INSN (seq
))
5913 if (!NOTE_P (seq
) || NOTE_KIND (seq
) != NOTE_INSN_PROLOGUE_END
)
5915 nonempty_prologue
= true;
5919 if (flag_shrink_wrap
&& HAVE_simple_return
5920 && (targetm
.profile_before_prologue () || !crtl
->profile
)
5921 && nonempty_prologue
&& !crtl
->calls_eh_return
)
5923 HARD_REG_SET prologue_clobbered
, prologue_used
, live_on_edge
;
5924 struct hard_reg_set_container set_up_by_prologue
;
5926 VEC(basic_block
, heap
) *vec
;
5928 bitmap_head bb_antic_flags
;
5929 bitmap_head bb_on_list
;
5930 bitmap_head bb_tail
;
5933 fprintf (dump_file
, "Attempting shrink-wrapping optimization.\n");
5935 /* Compute the registers set and used in the prologue. */
5936 CLEAR_HARD_REG_SET (prologue_clobbered
);
5937 CLEAR_HARD_REG_SET (prologue_used
);
5938 for (p_insn
= prologue_seq
; p_insn
; p_insn
= NEXT_INSN (p_insn
))
5940 HARD_REG_SET this_used
;
5941 if (!NONDEBUG_INSN_P (p_insn
))
5944 CLEAR_HARD_REG_SET (this_used
);
5945 note_uses (&PATTERN (p_insn
), record_hard_reg_uses
,
5947 AND_COMPL_HARD_REG_SET (this_used
, prologue_clobbered
);
5948 IOR_HARD_REG_SET (prologue_used
, this_used
);
5949 note_stores (PATTERN (p_insn
), record_hard_reg_sets
,
5950 &prologue_clobbered
);
5953 prepare_shrink_wrap (entry_edge
->dest
);
5955 bitmap_initialize (&bb_antic_flags
, &bitmap_default_obstack
);
5956 bitmap_initialize (&bb_on_list
, &bitmap_default_obstack
);
5957 bitmap_initialize (&bb_tail
, &bitmap_default_obstack
);
5959 /* Find the set of basic blocks that require a stack frame,
5960 and blocks that are too big to be duplicated. */
5962 vec
= VEC_alloc (basic_block
, heap
, n_basic_blocks
);
5964 CLEAR_HARD_REG_SET (set_up_by_prologue
.set
);
5965 add_to_hard_reg_set (&set_up_by_prologue
.set
, Pmode
,
5966 STACK_POINTER_REGNUM
);
5967 add_to_hard_reg_set (&set_up_by_prologue
.set
, Pmode
, ARG_POINTER_REGNUM
);
5968 if (frame_pointer_needed
)
5969 add_to_hard_reg_set (&set_up_by_prologue
.set
, Pmode
,
5970 HARD_FRAME_POINTER_REGNUM
);
5971 if (pic_offset_table_rtx
)
5972 add_to_hard_reg_set (&set_up_by_prologue
.set
, Pmode
,
5973 PIC_OFFSET_TABLE_REGNUM
);
5974 if (stack_realign_drap
&& crtl
->drap_reg
)
5975 add_to_hard_reg_set (&set_up_by_prologue
.set
,
5976 GET_MODE (crtl
->drap_reg
),
5977 REGNO (crtl
->drap_reg
));
5978 if (targetm
.set_up_by_prologue
)
5979 targetm
.set_up_by_prologue (&set_up_by_prologue
);
5981 /* We don't use a different max size depending on
5982 optimize_bb_for_speed_p because increasing shrink-wrapping
5983 opportunities by duplicating tail blocks can actually result
5984 in an overall decrease in code size. */
5985 max_grow_size
= get_uncond_jump_length ();
5986 max_grow_size
*= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS
);
5993 FOR_BB_INSNS (bb
, insn
)
5994 if (NONDEBUG_INSN_P (insn
))
5996 if (requires_stack_frame_p (insn
, prologue_used
,
5997 set_up_by_prologue
.set
))
5999 if (bb
== entry_edge
->dest
)
6000 goto fail_shrinkwrap
;
6001 bitmap_set_bit (&bb_flags
, bb
->index
);
6002 VEC_quick_push (basic_block
, vec
, bb
);
6005 else if (size
<= max_grow_size
)
6007 size
+= get_attr_min_length (insn
);
6008 if (size
> max_grow_size
)
6009 bitmap_set_bit (&bb_on_list
, bb
->index
);
6014 /* Blocks that really need a prologue, or are too big for tails. */
6015 bitmap_ior_into (&bb_on_list
, &bb_flags
);
6017 /* For every basic block that needs a prologue, mark all blocks
6018 reachable from it, so as to ensure they are also seen as
6019 requiring a prologue. */
6020 while (!VEC_empty (basic_block
, vec
))
6022 basic_block tmp_bb
= VEC_pop (basic_block
, vec
);
6024 FOR_EACH_EDGE (e
, ei
, tmp_bb
->succs
)
6025 if (e
->dest
!= EXIT_BLOCK_PTR
6026 && bitmap_set_bit (&bb_flags
, e
->dest
->index
))
6027 VEC_quick_push (basic_block
, vec
, e
->dest
);
      /* Find the set of basic blocks that need no prologue, have a
         single successor, can be duplicated, meet a max size
         requirement, and go to the exit via like blocks.  */
      VEC_quick_push (basic_block, vec, EXIT_BLOCK_PTR);
      while (!VEC_empty (basic_block, vec))
        {
          basic_block tmp_bb = VEC_pop (basic_block, vec);

          FOR_EACH_EDGE (e, ei, tmp_bb->preds)
            if (single_succ_p (e->src)
                && !bitmap_bit_p (&bb_on_list, e->src->index)
                && can_duplicate_block_p (e->src))
              {
                edge pe;
                edge_iterator pei;

                /* If there is a predecessor of e->src which doesn't
                   need a prologue and the edge is complex, we might
                   not be able to redirect the branch to a copy of
                   e->src.  */
                FOR_EACH_EDGE (pe, pei, e->src->preds)
                  if ((pe->flags & EDGE_COMPLEX) != 0
                      && !bitmap_bit_p (&bb_flags, pe->src->index))
                    break;
                if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
                  VEC_quick_push (basic_block, vec, e->src);
              }
        }
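
      /* bb_tail now holds candidate "tail" blocks: prologue-free chains
         that reach the exit only through other such blocks, and which can
         therefore be duplicated later for paths that skip the prologue.  */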
      /* Now walk backwards from every block that is marked as needing
         a prologue to compute the bb_antic_flags bitmap.  Exclude
         tail blocks; they can be duplicated to be used on paths not
         needing a prologue.  */
      bitmap_clear (&bb_on_list);
      bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
      FOR_EACH_BB (bb)
        {
          if (!bitmap_bit_p (&bb_antic_flags, bb->index))
            continue;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                && bitmap_set_bit (&bb_on_list, e->src->index))
              VEC_quick_push (basic_block, vec, e->src);
        }
      while (!VEC_empty (basic_block, vec))
        {
          basic_block tmp_bb = VEC_pop (basic_block, vec);
          bool all_set = true;

          bitmap_clear_bit (&bb_on_list, tmp_bb->index);
          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
              {
                all_set = false;
                break;
              }

          if (all_set)
            {
              bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
              FOR_EACH_EDGE (e, ei, tmp_bb->preds)
                if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                    && bitmap_set_bit (&bb_on_list, e->src->index))
                  VEC_quick_push (basic_block, vec, e->src);
            }
        }
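
      /* bb_antic_flags is now the set of blocks where the prologue is
         anticipated: blocks that need it themselves, or whose every
         successor already anticipates it.  A block enters the set only
         once all of its successors are in it.  */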
      /* Find exactly one edge that leads to a block in ANTIC from
         a block that isn't.  */
      if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
        FOR_EACH_BB (bb)
          {
            if (!bitmap_bit_p (&bb_antic_flags, bb->index))
              continue;
            FOR_EACH_EDGE (e, ei, bb->preds)
              if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
                {
                  if (entry_edge != orig_entry_edge)
                    {
                      entry_edge = orig_entry_edge;
                      if (dump_file)
                        fprintf (dump_file, "More than one candidate edge.\n");
                      goto fail_shrinkwrap;
                    }
                  if (dump_file)
                    fprintf (dump_file, "Found candidate edge for "
                             "shrink-wrapping, %d->%d.\n", e->src->index,
                             e->dest->index);
                  entry_edge = e;
                }
          }
      if (entry_edge != orig_entry_edge)
        {
          /* Test whether the prologue is known to clobber any register
             (other than FP or SP) which are live on the edge.  */
          CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
          CLEAR_HARD_REG_SET (live_on_edge);
          reg_set_to_hard_reg_set (&live_on_edge,
                                   df_get_live_in (entry_edge->dest));
          if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
            {
              entry_edge = orig_entry_edge;
              if (dump_file)
                fprintf (dump_file,
                         "Shrink-wrapping aborted due to clobber.\n");
            }
        }
      if (entry_edge != orig_entry_edge)
        {
          crtl->shrink_wrapped = true;
          if (dump_file)
            fprintf (dump_file, "Performing shrink-wrapping.\n");

          /* Find tail blocks reachable from both blocks needing a
             prologue and blocks not needing a prologue.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB (bb)
              {
                bool some_pro, some_no_pro;
                if (!bitmap_bit_p (&bb_tail, bb->index))
                  continue;
                some_pro = some_no_pro = false;
                FOR_EACH_EDGE (e, ei, bb->preds)
                  {
                    if (bitmap_bit_p (&bb_flags, e->src->index))
                      some_pro = true;
                    else
                      some_no_pro = true;
                  }
                if (some_pro && some_no_pro)
                  VEC_quick_push (basic_block, vec, bb);
                else
                  bitmap_clear_bit (&bb_tail, bb->index);
              }

          /* Find the head of each tail.  */
          while (!VEC_empty (basic_block, vec))
            {
              basic_block tbb = VEC_pop (basic_block, vec);

              if (!bitmap_bit_p (&bb_tail, tbb->index))
                continue;

              while (single_succ_p (tbb))
                {
                  tbb = single_succ (tbb);
                  bitmap_clear_bit (&bb_tail, tbb->index);
                }
            }
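
          /* Only the heads of the tail chains remain marked in bb_tail
             now; the walk above cleared every block further down each
             single-successor chain.  */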
          /* Now duplicate the tails.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB_REVERSE (bb)
              {
                basic_block copy_bb, tbb;
                rtx insert_point;
                int eflags;

                if (!bitmap_clear_bit (&bb_tail, bb->index))
                  continue;

                /* Create a copy of BB, instructions and all, for
                   use on paths that don't need a prologue.
                   Ideal placement of the copy is on a fall-thru edge
                   or after a block that would jump to the copy.  */
                FOR_EACH_EDGE (e, ei, bb->preds)
                  if (!bitmap_bit_p (&bb_flags, e->src->index)
                      && single_succ_p (e->src))
                    break;
                if (e)
                  {
                    copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)),
                                                  NULL_RTX, e->src);
                    BB_COPY_PARTITION (copy_bb, e->src);
                  }
                else
                  {
                    /* Otherwise put the copy at the end of the function.  */
                    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
                                                  EXIT_BLOCK_PTR->prev_bb);
                    BB_COPY_PARTITION (copy_bb, bb);
                  }

                insert_point = emit_note_after (NOTE_INSN_DELETED,
                                                BB_END (copy_bb));
                emit_barrier_after (BB_END (copy_bb));

                tbb = bb;
                while (1)
                  {
                    dup_block_and_redirect (tbb, copy_bb, insert_point,
                                            &bb_flags);
                    tbb = single_succ (tbb);
                    if (tbb == EXIT_BLOCK_PTR)
                      break;
                    e = split_block (copy_bb, PREV_INSN (insert_point));
                    copy_bb = e->dest;
                  }

                /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
                   We have yet to add a simple_return to the tails,
                   as we'd like to first convert_jumps_to_returns in
                   case the block is no longer used after that.  */
                eflags = EDGE_FAKE;
                if (CALL_P (PREV_INSN (insert_point))
                    && SIBLING_CALL_P (PREV_INSN (insert_point)))
                  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
                make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);

                /* verify_flow_info doesn't like a note after a
                   sibling call.  */
                delete_insn (insert_point);
                if (bitmap_empty_p (&bb_tail))
                  break;
              }
        }

    fail_shrinkwrap:
      bitmap_clear (&bb_tail);
      bitmap_clear (&bb_antic_flags);
      bitmap_clear (&bb_on_list);
      VEC_free (basic_block, heap, vec);
    }
#endif
  if (split_prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
      inserted = true;
    }
  if (prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (prologue_seq, entry_edge);
      inserted = true;
    }
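
  /* insert_insn_on_edge only queues the sequences; nothing is emitted
     into the insn stream until commit_edge_insertions runs below, after
     all prologue and epilogue decisions have been made.  */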
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  /* If we're allowed to generate a simple return instruction, then by
     definition we don't need a full epilogue.  If the last basic
     block before the exit block does not contain active instructions,
     examine its predecessors and try to emit (conditional) return
     instructions.  */
#ifdef HAVE_simple_return
  if (entry_edge != orig_entry_edge)
    {
      if (optimize)
        {
          unsigned i, last;

          /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
             (but won't remove).  Stop at end of current preds.  */
          last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
          for (i = 0; i < last; i++)
            {
              e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
              if (LABEL_P (BB_HEAD (e->src))
                  && !bitmap_bit_p (&bb_flags, e->src->index)
                  && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
                unconverted_simple_returns
                  = convert_jumps_to_returns (e->src, true,
                                              unconverted_simple_returns);
            }
        }

      if (exit_fallthru_edge != NULL
          && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
          && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
        {
          basic_block last_bb;

          last_bb = emit_return_for_exit (exit_fallthru_edge, true);
          returnjump = BB_END (last_bb);
          exit_fallthru_edge = NULL;
        }
    }
#endif
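
  /* At this point a simple_return has been emitted for the fall-through
     exit path if one was possible; exit_fallthru_edge == NULL records
     that no further epilogue is needed on that path.  */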
#ifdef HAVE_return
  if (HAVE_return)
    {
      if (exit_fallthru_edge == NULL)
        goto epilogue_done;

      if (optimize)
        {
          basic_block last_bb = exit_fallthru_edge->src;

          if (LABEL_P (BB_HEAD (last_bb))
              && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
            convert_jumps_to_returns (last_bb, false, NULL);

          if (EDGE_COUNT (last_bb->preds) != 0
              && single_succ_p (last_bb))
            {
              last_bb = emit_return_for_exit (exit_fallthru_edge, false);
              epilogue_end = returnjump = BB_END (last_bb);
#ifdef HAVE_simple_return
              /* Emitting the return may add a basic block.
                 Fix bb_flags for the added block.  */
              if (last_bb != exit_fallthru_edge->src)
                bitmap_set_bit (&bb_flags, last_bb->index);
#endif
              goto epilogue_done;
            }
        }
    }
#endif
  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;
#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
        continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
        continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
        continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif
  /* If nothing falls through into the exit block, we don't need an
     epilogue.  */
  if (exit_fallthru_edge == NULL)
    goto epilogue_done;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      if (seq)
        emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      returnjump = get_last_insn ();
      end_sequence ();

      insert_insn_on_edge (seq, exit_fallthru_edge);
      inserted = true;

      if (JUMP_P (returnjump))
        set_return_jump_label (returnjump);
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }

 epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block);
      sbitmap_zero (blocks);
      SET_BIT (blocks, entry_edge->dest->index);
      SET_BIT (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
         be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (((e->flags & EDGE_FALLTHRU) != 0)
            && returnjump_p (BB_END (e->src)))
          e->flags &= ~EDGE_FALLTHRU;
    }
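
  /* find_many_sub_basic_blocks is needed because the committed prologue
     sequence may itself contain control flow (e.g. stack-probing loops),
     which must be split into proper basic blocks.  */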

#ifdef HAVE_simple_return
  /* If there were branches to an empty LAST_BB which we tried to
     convert to conditional simple_returns, but couldn't for some
     reason, create a block to hold a simple_return insn and redirect
     those remaining edges.  */
  if (!VEC_empty (edge, unconverted_simple_returns))
    {
      basic_block simple_return_block_hot = NULL;
      basic_block simple_return_block_cold = NULL;
      edge pending_edge_hot = NULL;
      edge pending_edge_cold = NULL;
      basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
      int i;

      gcc_assert (entry_edge != orig_entry_edge);

      /* See if we can reuse the last insn that was emitted for the
         epilogue.  */
      if (returnjump != NULL_RTX
          && JUMP_LABEL (returnjump) == simple_return_rtx)
        {
          e = split_block (BLOCK_FOR_INSN (returnjump),
                           PREV_INSN (returnjump));
          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            simple_return_block_hot = e->dest;
          else
            simple_return_block_cold = e->dest;
        }

      /* Also check returns we might need to add to tail blocks.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
              pending_edge_hot = e;
            else
              pending_edge_cold = e;
          }

      FOR_EACH_VEC_ELT (edge, unconverted_simple_returns, i, e)
        {
          basic_block *pdest_bb;
          edge pending;

          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            {
              pdest_bb = &simple_return_block_hot;
              pending = pending_edge_hot;
            }
          else
            {
              pdest_bb = &simple_return_block_cold;
              pending = pending_edge_cold;
            }

          if (*pdest_bb == NULL && pending != NULL)
            {
              emit_return_into_block (true, pending->src);
              pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
              *pdest_bb = pending->src;
            }
          else if (*pdest_bb == NULL)
            {
              basic_block bb;
              rtx start;

              bb = create_basic_block (NULL, NULL, exit_pred);
              BB_COPY_PARTITION (bb, e->src);
              start = emit_jump_insn_after (gen_simple_return (),
                                            BB_END (bb));
              JUMP_LABEL (start) = simple_return_rtx;
              emit_barrier_after (start);

              *pdest_bb = bb;
              make_edge (bb, EXIT_BLOCK_PTR, 0);
            }
          redirect_edge_and_branch_force (e, *pdest_bb);
        }
      VEC_free (edge, heap, unconverted_simple_returns);
    }
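
  /* A separate simple_return block is kept for each of the hot and cold
     partitions so that redirected jumps never cross a partition boundary,
     which would break hot/cold section splitting.  */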

  if (entry_edge != orig_entry_edge)
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            emit_return_into_block (true, e->src);
            e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
          }
    }
#endif

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn)
#ifdef HAVE_simple_return
          || (entry_edge != orig_entry_edge
              && !bitmap_bit_p (&bb_flags, bb->index))
#endif
          )
        {
          ei_next (&ei);
          continue;
        }

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before we
             actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locators (seq, epilogue_locator);

          emit_insn_before (seq, insn);
        }
      ei_next (&ei);
    }
#endif
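
  /* Each sibling call gets its own copy of the epilogue because a
     sibcall leaves the function via the call itself, never through the
     common exit path.  */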

#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so picky about the
         existence of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif

#ifdef HAVE_simple_return
  bitmap_clear (&bb_flags);
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}

/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                {
                  if (NOTE_P (note)
                      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                    break;
                }
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          rtx insn, first = NULL, note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue
             insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  if (cfun == NULL)
    return "<none>";
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}

/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        /* So this might be a type referenced by a global variable.
           Record that type so that we can later decide to emit its debug
           information.  */
        VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
    }
}

/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
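
/* Note that the hash and equality functions must agree: the two fields
   compared in types_used_by_vars_eq are exactly the ones mixed into the
   hash value above, so equal entries always hash identically.  */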

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;

      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash
          = htab_create_ggc (37, types_used_by_vars_do_hash,
                             types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
                                       hash_types_used_by_vars_entry (&e),
                                       INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;

          entry = ggc_alloc_types_used_by_vars_entry ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}

struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  "*leaf_regs",                         /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}

struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */

static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change in once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);
      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}
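
/* Insn rescans are deferred above via DF_DEFER_INSN_RESCAN; returning
   TODO_df_finish lets the pass manager run df_finish_pass, which
   processes the queued rescans in one batch.  */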

struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

#include "gt-function.h"