/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimple-expr.h"
#include "tree-pass.h"
#include "bb-reorder.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
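/* A hedged worked example (not part of the original file): with the
   power-of-two assumption these macros reduce to bit masking, e.g.

     FLOOR_ROUND (37, 16)  == 32
     CEIL_ROUND  (37, 16)  == 48
     FLOOR_ROUND (-20, 16) == -32

   The last case is why masking is used: C division with a negative
   dividend rounds toward zero and would not give a true floor.  */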
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     type.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
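/* Hedged examples of the ALIGN convention above (not part of the
   original file); sizes and modes are illustrative assumptions:

     assign_stack_local_1 (DImode, 8, 0, ASLK_RECORD_PAD);
	-- slot aligned according to DImode
     assign_stack_local_1 (BLKmode, len, -1, ASLK_RECORD_PAD);
	-- BIGGEST_ALIGNMENT, LEN rounded up to a multiple of it
     assign_stack_local_1 (BLKmode, len, -2, ASLK_RECORD_PAD);
	-- byte alignment only  */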
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with the last parameter as
   ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */
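/* A hedged sketch of the intended nesting discipline (not part of the
   original file); RESULT_RTX stands for whatever rtx the statement's
   value ends up in:

     push_temp_slots ();
     ... expand a statement, possibly calling assign_stack_temp ...
     preserve_temp_slots (result_rtx);
     free_temp_slots ();
     pop_temp_slots ();

   Everything at the inner level is freed, except that slots which may
   hold the result are moved up one level first.  */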
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    htab_clear_slot (temp_slot_address_table, slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    htab_traverse (temp_slot_address_table,
		   remove_unused_temp_slot_addresses_1,
		   NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  if (INTVAL (XEXP (x, 1)) >= p->base_offset
	      && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	    return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */
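/* A hedged usage sketch (not part of the original file): callers that
   have a type in hand usually compute the size from it, e.g.

     rtx mem = assign_stack_temp_for_type (TYPE_MODE (type),
					   int_size_in_bytes (type), type);

   which is essentially what assign_temp below does for BLKmode or
   memory-required values.  */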
rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc_temp_slot ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
1145 end of generating code for a statement. */
1148 free_temp_slots (void)
1150 struct temp_slot
*p
, *next
;
1151 bool some_available
= false;
1153 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1156 make_slot_available (p
);
1157 some_available
= true;
1162 remove_unused_temp_slot_addresses ();
1163 combine_temp_slots ();
1167 /* Push deeper into the nesting level for stack temporaries. */
1170 push_temp_slots (void)
1175 /* Pop a temporary nesting level. All slots in use in the current level
1179 pop_temp_slots (void)
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ??? This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
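/* A hedged usage note (not part of the original file): back ends use
   this to refer to a register's value on entry to the function, e.g.
   a port might materialize the incoming return address with

     rtx ra = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   where RETURN_ADDR_REGNUM stands for a hypothetical target macro.  */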
rtx
get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc_initial_value_struct ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc_vec_initial_value_pair (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
	  && REGNO (ivs->entries[i].hard_reg) == regno)
	return ivs->entries[i].pseudo;

  return NULL_RTX;
}
unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (crtl->outgoing_args_size						      \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: INCOMING_REG_PARM_STACK_SPACE (FNDECL)))			      \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)		      \
 + (STACK_POINTER_OFFSET))
#endif
#endif
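/* A hedged reading of the default above (not part of the original
   file): on a target that accumulates outgoing arguments, dynamic
   allocations start above the outgoing-argument block, e.g. with
   crtl->outgoing_args_size == 32 and STACK_POINTER_OFFSET == 0 the
   default STACK_DYNAMIC_OFFSET evaluates to 32.  */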
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */
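/* A hedged example of the mapping (not part of the original file):
   given virtual_stack_vars_rtx, this returns frame_pointer_rtx and
   sets *POFFSET to var_offset, so a caller that saw
   (plus:SI (reg virtual-stack-vars) (const_int 8)) can rewrite it as
   (plus:SI (reg frame-pointer) (const_int (8 + var_offset))).  */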
static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (GET_MODE (x), new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   gen_int_mode (-offset, GET_MODE (new_rtx)));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
				   gen_int_mode (offset,
						 GET_MODE (SET_DEST (set))),
				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is a valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       gen_int_mode (offset, GET_MODE (x)),
				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop
		(GET_MODE (new_rtx), PLUS, new_rtx,
		 gen_int_mode (offset, GET_MODE (new_rtx)),
		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  /* For asm goto, instead of fixing up all the edges
	     just clear the template and clear input operands
	     (asm goto doesn't have any output operands).  */
	  if (JUMP_P (insn))
	    {
	      rtx asm_op = extract_asm_operands (PATTERN (insn));
	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
	    }
	  else
	    delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;
	else if (DEBUG_INSN_P (insn))
	  for_each_rtx (&INSN_VAR_LOCATION (insn),
			instantiate_virtual_regs_in_rtx, NULL);
	else
	  instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (CALL_P (insn))
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return instantiate_virtual_regs (); }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */
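/* A hedged example (not part of the original file): a typical query is
   whether a function's return value must go in memory, e.g.

     if (aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl)), fndecl))
       ... return the value through a hidden pointer argument ...

   where FNDECL is the FUNCTION_DECL being compiled.  */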
int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	{
	  tree fndecl = get_callee_fndecl (fntype);
	  fntype = (fndecl
		    ? TREE_TYPE (fndecl)
		    : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
	}
	break;
      case FUNCTION_DECL:
	fntype = TREE_TYPE (fntype);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = NULL_TREE;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
	 types containing methods, otherwise the methods won't be callable
	 from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
	return false;
      break;
    default:
      break;
    }

  return true;
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}
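/* Sketch of the transformation (illustrative, not from the original
   sources), for a target whose split_complex_arg hook accepts
   COMPLEX_TYPE:

       void f (_Complex double z);

   becomes, for argument-passing purposes, two adjacent PARM_DECLs of
   type double -- the real part, then a synthesized decl for the
   imaginary part inserted immediately after it.  */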
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
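/* Worked example of the pretend-args rounding above (illustrative,
   assuming STACK_BOUNDARY is 64 so STACK_BYTES is 8): if 6 bytes of the
   first stack argument arrived in registers, then

       pretend_args_size = CEIL_ROUND (6, 8) = (6 + 8 - 1) & ~7 = 8

   i.e. the prologue allocates a full 8-byte slot so the register part
   can be stored contiguously with the stack part.  */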
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
						  data->promoted_mode);
	      if (offset)
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
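/* Worked example of the alignment guess above (illustrative): for a
   downward-padded argument with no incoming register, a constant
   offset of 4 bytes and a 64-bit boundary give

       align = (4 * BITS_PER_UNIT) | 64 = 32 | 64 = 96
       align & -align = 32

   i.e. the slot is assumed 32-bit aligned -- the largest power of two
   dividing both the byte offset (in bits) and the boundary.  */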
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (stack_parm), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
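/* Illustration (an assumption about a hypothetical target, not taken
   from any port): on a big-endian machine that defines
   BLOCK_REG_PADDING, a 3-byte struct arriving in a register and padded
   upward matches the test above, so it takes the BLKmode path below;
   assign_parm_setup_reg has no way to place the significant bytes of
   such a value correctly.  */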
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  enum machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
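/* Worked example for the left-justification branch above (illustrative,
   assuming UNITS_PER_WORD is 4 and SIZE is 3 on a big-endian machine):

       by = (4 - 3) * BITS_PER_UNIT = 8

   so the register is shifted left 8 bits before the word_mode store,
   moving the 3 significant bytes to the most significant end, which is
   where a big-endian block in memory expects them.  */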
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (data->entry_parm);

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx insn, insns, t = op1;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  insn = gen_extend_insn (op0, t, promoted_nominal_mode,
				  data->passed_mode, unsignedp);
	  emit_insn (insn);
	  insns = get_insns ();
	  end_sequence ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (PATTERN (insn), record_hard_reg_sets,
			     &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, DECL_RTL (parm));
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (parmreg, tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  enum machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
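/* Illustration of the conversion logic above (a sketch, for a
   hypothetical target): suppose a `short' argument arrives unpromoted,
   so nominal_mode == passed_mode == HImode, but pseudos hold it widened
   and promoted_nominal_mode is SImode.  Then need_conversion is true
   and can_extend_p is asked for a single HImode->SImode extension insn;
   that insn is used in place only if it provably sets no hard register,
   otherwise the copy is parked on the conversion sequence and emitted
   after all parameters are out of hard registers.  */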
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  int offset = subreg_lowpart_offset (data->nominal_mode,
					      GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  SET_DECL_RTL (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      if (cfun->stdarg && !DECL_CHAIN (parm))
	assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}

      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
	assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
	assign_parm_setup_reg (&all, parm, &data);
      else
	assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  enum machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result. As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  crtl->args.arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
				   size_int (-all.stack_args_size.constant)),
		      NULL_RTX, VOIDmode, EXPAND_NORMAL));
#else
  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARMs.  Keep the parms address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
				       size_int (DECL_ALIGN (parm)));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
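/* Sketch of the callee-copy transformation above (illustrative; the
   names are hypothetical): assuming an ABI whose callee_copies hook
   returns true, for `void f (struct big s)' passed by invisible
   reference the loop creates a local temporary and emits, in GIMPLE
   terms, roughly `s.copy = *s.ptr', then redirects uses of the
   parameter to the copy via SET_DECL_VALUE_EXPR.  Variable-sized copies
   draw their storage from BUILT_IN_ALLOCA_WITH_ALIGN instead of a fixed
   temporary.  */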
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					  initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
	&& (!tree_fits_uhwi_p (sizetree)
	    || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
      s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs || reg_parm_stack_space > 0)
    pad_to_arg_alignment (&locate->slot_offset, boundary,
			  &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
			   - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
				   size_binop (MINUS_EXPR,
					       ssize_int (0),
					       initial_offset_ptr->var),
				   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs || reg_parm_stack_space > 0)
    pad_to_arg_alignment (initial_offset_ptr, boundary,
			  &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!tree_fits_uhwi_p (sizetree)
	  || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
    sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
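/* Worked example of the two roundings described above (illustrative,
   assuming PARM_BOUNDARY is 32 and a 64-bit argument boundary for a
   5-byte BLKmode argument passed on the stack): the slot offset is
   first aligned up to a multiple of 8 bytes (the
   TARGET_FUNCTION_ARG_BOUNDARY rounding), and the recorded size becomes
   round_up (5, 4) = 8 bytes (the PARM_BOUNDARY rounding).  The initial
   offset the caller passed in is changed only by the first rounding,
   never by the second.  */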
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
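/* Worked example (illustrative, assuming STACK_POINTER_OFFSET is 0 and
   args grow upward): padding a constant offset of 20 bytes to a 64-bit
   (8-byte) boundary gives

       CEIL_ROUND (20, 8) = (20 + 8 - 1) & ~7 = 24

   and, when 64 bits exceeds PARM_BOUNDARY, the 4 bytes of padding are
   recorded in alignment_pad.  */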
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode,
	   tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
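/* Worked example (illustrative, assuming PARM_BOUNDARY is 32): an
   HImode argument (16 bits) padded downward has

       ((16 + 31) / 32) * 32 / BITS_PER_UNIT - 2 = 4 - 2 = 2

   added to its offset, so the value is placed 2 bytes into its 4-byte
   slot.  */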
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
           || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                               regno))
          && REGNO_REG_SET_P (setjmp_crosses, regno));
}
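
/* An illustrative case for the check above (not from the original
   sources):

       jmp_buf buf;
       int f (void)
       {
         int v = 1;
         if (setjmp (buf) == 0)
           {
             v = 2;
             g ();   /* may longjmp back  */
           }
         return v;
       }

   If V is held in a register that is set more than once and is live
   across the setjmp call, a longjmp may restore a stale register
   value; such registers are what -Wclobbered reports.  */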
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow_analysis before register
   allocation since that will clobber the pseudo-regs to hard
   regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
          && DECL_RTL_SET_P (decl)
          && REG_P (DECL_RTL (decl))
          && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
        warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
                 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
               "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
               decl);
}

/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
          || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
              != prev_super))
        BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
          && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
        {
          BLOCK_FRAGMENT_CHAIN (block)
            = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
          if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
            BLOCK_SAME_RANGE (block) = 0;
        }
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
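
/* For example (illustrative only): a chain A -> B -> C, where B also
   has subblocks B1 -> B2, becomes C -> B -> A with B's subblocks
   reversed to B2 -> B1, and each BLOCK_FRAGMENT_CHAIN is reversed by
   block_fragments_nreverse along the way.  */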
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx insns, tree current_block, vec<tree> *p_block_stack)
{
  rtx insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
        {
          if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
            {
              tree block = NOTE_BLOCK (insn);
              tree origin;

              gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
              origin = block;

              if (prev_end)
                BLOCK_SAME_RANGE (prev_end) = 0;
              prev_end = NULL_TREE;

              /* If we have seen this block before, that means it now
                 spans multiple address regions.  Create a new fragment.  */
              if (TREE_ASM_WRITTEN (block))
                {
                  tree new_block = copy_node (block);

                  BLOCK_SAME_RANGE (new_block) = 0;
                  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
                  BLOCK_FRAGMENT_CHAIN (new_block)
                    = BLOCK_FRAGMENT_CHAIN (origin);
                  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

                  NOTE_BLOCK (insn) = new_block;
                  block = new_block;
                }

              if (prev_beg == current_block && prev_beg)
                BLOCK_SAME_RANGE (block) = 1;

              prev_beg = origin;

              BLOCK_SUBBLOCKS (block) = 0;
              TREE_ASM_WRITTEN (block) = 1;
              /* When there's only one block for the entire function,
                 current_block == block and we mustn't do this, it
                 will cause infinite recursion.  */
              if (block != current_block)
                {
                  tree super;
                  if (block != origin)
                    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
                                || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
                                                                    (origin))
                                   == current_block);
                  if (p_block_stack->is_empty ())
                    super = current_block;
                  else
                    {
                      super = p_block_stack->last ();
                      gcc_assert (super == current_block
                                  || BLOCK_FRAGMENT_ORIGIN (super)
                                     == current_block);
                    }
                  BLOCK_SUPERCONTEXT (block) = super;
                  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
                  BLOCK_SUBBLOCKS (current_block) = block;
                  current_block = origin;
                }
              p_block_stack->safe_push (block);
            }
          else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
            {
              NOTE_BLOCK (insn) = p_block_stack->pop ();
              current_block = BLOCK_SUPERCONTEXT (current_block);
              if (BLOCK_FRAGMENT_ORIGIN (current_block))
                current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
              prev_beg = NULL_TREE;
              prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
                         ? NOTE_BLOCK (insn) : NULL_TREE;
            }
        }
      else
        {
          prev_beg = NULL_TREE;
          if (prev_end)
            BLOCK_SAME_RANGE (prev_end) = 0;
          prev_end = NULL_TREE;
        }
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
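
/* For example (illustrative only): for the block tree

       B0
       |- B1
       |  `- B2
       `- B3

   the vector is filled in preorder as B0, B1, B2, B3, and the
   function returns 4.  */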
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
                   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
                   : optimization_default_node);

      if (!opts)
        opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
        {
          optimization_current_node = opts;
          cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
        }

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      if (opts != optimization_default_node)
        {
          init_tree_optimization_optabs (opts);
          if (TREE_OPTIMIZATION_OPTABS (opts))
            this_fn_optabs = (struct target_optabs *)
              TREE_OPTIMIZATION_OPTABS (opts);
        }
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function_p> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
              || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}
/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
                       || !cfun
                       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
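
/* Typical usage of the pair above (a sketch, not code from this
   file): temporarily enter another function's context and restore
   the previous one afterwards:

       push_cfun (DECL_STRUCT_FUNCTION (fndecl));
       ... inspect or emit RTL on behalf of FNDECL via cfun ...
       pop_cfun ();
*/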
/* Return value of funcdef and increase it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Restore funcdef_no to FN.  */
void
set_funcdef_no (int fn)
{
  funcdef_no = fn;
}

/* Reset the funcdef number.  */
void
reset_funcdef_no (void)
{
  funcdef_no = 0;
}

/* Return value of funcdef.  */
int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared_function ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
      cfun->module_id = current_module_id;
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          cfun->returns_pcc_struct = 1;
#endif
          cfun->returns_struct = 1;
        }

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
         but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
              || (!cfun && !current_function_decl)
              || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_alloc_cleared_stack_usage ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);
  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
        {
          emit_insn (tmp);
          break;
        }
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
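
/* The emitted epilogue check has roughly this shape (a sketch, not a
   literal dump):

       x = <guard slot in this frame>;
       y = <canonical guard value>;
       if (x == y) goto label;
       <call targetm.stack_protect_fail, typically __stack_chk_fail>
     label:

   so the failure path falls through into the noreturn call predicted
   above.  */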
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (sv)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, sv);
            }
        }
      if (value_address)
        {
          rtx x = value_address;
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
            {
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
              set_mem_attributes (x, DECL_RESULT (subr), 1);
            }
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
         into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
          && targetm.calls.return_in_msb (return_type))
        /* expand_function_end will insert the appropriate padding in
           this case.  Use the return value's natural (unpadded) mode
           within the function proper.  */
        SET_DECL_RTL (DECL_RESULT (subr),
                      gen_reg_rtx (TYPE_MODE (return_type)));
      else
        {
          /* In order to figure out what mode to use for the pseudo, we
             figure out what the mode of the eventual return register will
             actually be, and use that.  */
          rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

          /* Structures that are returned in registers are not
             aggregate_value_p, so we may see a PARALLEL or a REG.  */
          if (REG_P (hard_reg))
            SET_DECL_RTL (DECL_RESULT (subr),
                          gen_reg_rtx (GET_MODE (hard_reg)));
          else
            {
              gcc_assert (GET_CODE (hard_reg) == PARALLEL);
              SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
            }
        }

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
         result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain, insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
          && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
        set_dst_reg_note (insn, REG_EQUIV, chain, local);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
                       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                       cfun->nonlocal_goto_save_area,
                       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = DECL_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
        && !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
/* Set the location of the insn chain starting at INSN to LOC.  */

static void
set_insn_locations (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
        INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (CALL_P (insn))
          {
            rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
            start_sequence ();
            if (STACK_CHECK_MOVING_SP)
              anti_adjust_stack_and_probe (max_frame_size, true);
            else
              probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
            seq = get_insns ();
            end_sequence ();
            set_insn_locations (seq, prologue_location);
            emit_insn_before (seq, stack_check_probe_note);
            break;
          }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
         the function context for sjlj exceptions.  */
      if (flag_exceptions)
        sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
         moved into the epilogue by scheduling, because we don't
         always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
        emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl = crtl->return_rtx;

          /* This should be set in assign_parms.  */
          gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

          /* If this is a BLKmode structure being returned in registers,
             then use the mode computed in expand_return.  Note that if
             decl_rtl is memory, then its mode may have been changed,
             but that crtl->return_rtx has not.  */
          if (GET_MODE (real_decl_rtl) == BLKmode)
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

          /* If a non-BLKmode return value should be padded at the least
             significant end of the register, shift it left by the appropriate
             amount.  BLKmode results are handled using the group load/store
             machinery.  */
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
              && REG_P (real_decl_rtl)
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
            {
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
                                           REGNO (real_decl_rtl)),
                              decl_rtl);
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
            }
          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             extension.  */
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
              promote_function_mode (TREE_TYPE (decl_result),
                                     GET_MODE (decl_rtl), &unsignedp,
                                     TREE_TYPE (current_function_decl), 1);

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
            {
              /* If expand_function_start has created a PARALLEL for decl_rtl,
                 move the result to the real return registers.  Otherwise, do
                 a group load from decl_rtl for a named return.  */
              if (GET_CODE (decl_rtl) == PARALLEL)
                emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
                                 TREE_TYPE (decl_result),
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
            }
          /* In the case of complex integer modes smaller than a word, we'll
             need to generate some non-trivial bitfield insertions.  Do that
             on a pseudo and not the hard register.  */
          else if (GET_CODE (decl_rtl) == CONCAT
                   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
                   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
            {
              int old_generating_concat_p;
              rtx tmp;

              old_generating_concat_p = generating_concat_p;
              generating_concat_p = 0;
              tmp = gen_reg_rtx (GET_MODE (decl_rtl));
              generating_concat_p = old_generating_concat_p;

              emit_move_insn (tmp, decl_rtl);
              emit_move_insn (real_decl_rtl, tmp);
            }
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
        }
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
        type = TREE_TYPE (type);
      else
        value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
                                               current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
                                              value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
         of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      rtx seq;

      start_sequence ();
      clobber_return_register ();
      seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0, seq;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret),
                      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx insns, rtx end, htab_t *hashp)
{
  rtx tmp;
  htab_t hash = *hashp;

  if (hash == NULL)
    *hashp = hash
      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      void **slot = htab_find_slot (hash, tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  htab_t hash;
  void **slot;

  hash = epilogue_insn_hash;
  if (!hash || !htab_find (hash, insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !htab_find (hash, insn))
        return;
    }

  slot = htab_find_slot (hash, copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, htab_t hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int i;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
          return true;
      return false;
    }

  return htab_find (hash, insn) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
#ifdef HAVE_simple_return

/* Return true if INSN requires the stack frame to be set up.
   PROLOGUE_USED contains the hard registers used in the function
   prologue.  SET_UP_BY_PROLOGUE is the set of registers we expect the
   prologue to set up for the function.  */
static bool
requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
                        HARD_REG_SET set_up_by_prologue)
{
  df_ref *df_rec;
  HARD_REG_SET hardregs;
  unsigned regno;

  if (CALL_P (insn))
    return !SIBLING_CALL_P (insn);

  /* We need a frame to get the unique CFA expected by the unwinder.  */
  if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
    return true;

  CLEAR_HARD_REG_SET (hardregs);
  for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
    {
      rtx dreg = DF_REF_REG (*df_rec);

      if (!REG_P (dreg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
                           REGNO (dreg));
    }
  if (hard_reg_set_intersect_p (hardregs, prologue_used))
    return true;
  AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (hardregs, regno)
        && df_regs_ever_live_p (regno))
      return true;

  for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
    {
      rtx reg = DF_REF_REG (*df_rec);

      if (!REG_P (reg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (reg),
                           REGNO (reg));
    }
  if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
    return true;

  return false;
}
/* See whether BB has a single successor that uses [REGNO, END_REGNO),
   and if BB is its only predecessor.  Return that block if so,
   otherwise return null.  */

static basic_block
next_block_for_reg (basic_block bb, int regno, int end_regno)
{
  edge e, live_edge;
  edge_iterator ei;
  bitmap live;
  int i;

  live_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      live = df_get_live_in (e->dest);
      for (i = regno; i < end_regno; i++)
        if (REGNO_REG_SET_P (live, i))
          {
            if (live_edge && live_edge != e)
              return NULL;
            live_edge = e;
          }
    }

  /* We can sometimes encounter dead code.  Don't try to move it
     into the exit block.  */
  if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Reject targets of abnormal edges.  This is needed for correctness
     on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
     exception edges even though it is generally treated as call-saved
     for the majority of the compilation.  Moving across abnormal edges
     isn't going to be interesting for shrink-wrap usage anyway.  */
  if (live_edge->flags & EDGE_ABNORMAL)
    return NULL;

  if (EDGE_COUNT (live_edge->dest->preds) > 1)
    return NULL;

  return live_edge->dest;
}
/* Try to move INSN from BB to a successor.  Return true on success.
   USES and DEFS are the set of registers that are used and defined
   after INSN in BB.  */

static bool
move_insn_for_shrink_wrap (basic_block bb, rtx insn,
                           const HARD_REG_SET uses,
                           const HARD_REG_SET defs)
{
  rtx set, src, dest;
  bitmap live_out, live_in, bb_uses, bb_defs;
  unsigned int i, dregno, end_dregno, sregno, end_sregno;
  basic_block next_block;

  /* Look for a simple register copy.  */
  set = single_set (insn);
  if (!set)
    return false;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (!REG_P (dest) || !REG_P (src))
    return false;

  /* Make sure that the source register isn't defined later in BB.  */
  sregno = REGNO (src);
  end_sregno = END_REGNO (src);
  if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
    return false;

  /* Make sure that the destination register isn't referenced later in BB.  */
  dregno = REGNO (dest);
  end_dregno = END_REGNO (dest);
  if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
      || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
    return false;

  /* See whether there is a successor block to which we could move INSN.  */
  next_block = next_block_for_reg (bb, dregno, end_dregno);
  if (!next_block)
    return false;

  /* At this point we are committed to moving INSN, but let's try to
     move it as far as we can.  */
  do
    {
      live_out = df_get_live_out (bb);
      live_in = df_get_live_in (next_block);
      bb = next_block;

      /* Check whether BB uses DEST or clobbers DEST.  We need to add
         INSN to BB if so.  Either way, DEST is no longer live on entry,
         except for any part that overlaps SRC (next loop).  */
      bb_uses = &DF_LR_BB_INFO (bb)->use;
      bb_defs = &DF_LR_BB_INFO (bb)->def;
      if (df_live)
        {
          for (i = dregno; i < end_dregno; i++)
            {
              if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i)
                  || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
                next_block = NULL;
              CLEAR_REGNO_REG_SET (live_out, i);
              CLEAR_REGNO_REG_SET (live_in, i);
            }

          /* Check whether BB clobbers SRC.  We need to add INSN to BB if so.
             Either way, SRC is now live on entry.  */
          for (i = sregno; i < end_sregno; i++)
            {
              if (REGNO_REG_SET_P (bb_defs, i)
                  || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
                next_block = NULL;
              SET_REGNO_REG_SET (live_out, i);
              SET_REGNO_REG_SET (live_in, i);
            }
        }
      else
        {
          /* DF_LR_BB_INFO (bb)->def does not comprise the DF_REF_PARTIAL and
             DF_REF_CONDITIONAL defs.  So if DF_LIVE doesn't exist, i.e.
             at -O1, just give up searching NEXT_BLOCK.  */
          next_block = NULL;
          for (i = dregno; i < end_dregno; i++)
            {
              CLEAR_REGNO_REG_SET (live_out, i);
              CLEAR_REGNO_REG_SET (live_in, i);
            }

          for (i = sregno; i < end_sregno; i++)
            {
              SET_REGNO_REG_SET (live_out, i);
              SET_REGNO_REG_SET (live_in, i);
            }
        }

      /* If we don't need to add the move to BB, look for a single
         successor block.  */
      if (next_block)
        next_block = next_block_for_reg (next_block, dregno, end_dregno);
    }
  while (next_block);

  /* BB now defines DEST.  It only uses the parts of DEST that overlap SRC
     (next loop).  */
  for (i = dregno; i < end_dregno; i++)
    {
      CLEAR_REGNO_REG_SET (bb_uses, i);
      SET_REGNO_REG_SET (bb_defs, i);
    }

  /* BB now uses SRC.  */
  for (i = sregno; i < end_sregno; i++)
    SET_REGNO_REG_SET (bb_uses, i);

  emit_insn_after (PATTERN (insn), bb_note (bb));
  delete_insn (insn);
  return true;
}
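
/* An illustrative case (not from the original sources): if the entry
   block ends with a copy such as

       (set (reg:DI bx) (reg:DI di))

   saving an incoming argument into a call-saved register, and BX is
   live down only one successor path, the copy can be sunk into that
   path, so the other paths may no longer require a prologue that
   saves BX.  */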
/* Look for register copies in the first block of the function, and move
   them down into successor blocks if the register is used only on one
   path.  This exposes more opportunities for shrink-wrapping.  These
   kinds of sets often occur when incoming argument registers are moved
   to call-saved registers because their values are live across one or
   more calls during the function.  */

static void
prepare_shrink_wrap (basic_block entry_block)
{
  rtx insn, curr, x;
  HARD_REG_SET uses, defs;
  df_ref *ref;

  CLEAR_HARD_REG_SET (uses);
  CLEAR_HARD_REG_SET (defs);
  FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
    if (NONDEBUG_INSN_P (insn)
        && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
      {
        /* Add all defined registers to DEFS.  */
        for (ref = DF_INSN_DEFS (insn); *ref; ref++)
          {
            x = DF_REF_REG (*ref);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (defs, REGNO (x));
          }

        /* Add all used registers to USES.  */
        for (ref = DF_INSN_USES (insn); *ref; ref++)
          {
            x = DF_REF_REG (*ref);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (uses, REGNO (x));
          }

        /* If frame_pointer_partially_needed, the compiler will
           implicitly insert a frame pointer setting insn
           before each call after the prologue is generated.  So
           assume an implicit def immediately before any
           call.  */
        if (frame_pointer_partially_needed && CALL_P (insn))
          SET_HARD_REG_BIT (defs, HARD_FRAME_POINTER_REGNUM);
      }
}
#endif
/* Insert use of return register before the end of BB.  */

static void
emit_use_return_register_into_block (basic_block bb)
{
  rtx seq, insn;
  start_sequence ();
  use_return_register ();
  seq = get_insns ();
  end_sequence ();
  insn = BB_END (bb);
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    insn = prev_cc0_setter (insn);
#endif
  emit_insn_before (seq, insn);
}
/* Create a return pattern, either simple_return or return, depending on
   simple_p.  */

static rtx
gen_return_pattern (bool simple_p)
{
#ifdef HAVE_simple_return
  return simple_p ? gen_simple_return () : gen_return ();
#else
  gcc_assert (!simple_p);
  return gen_return ();
#endif
}
/* Insert an appropriate return pattern at the end of block BB.  This
   also means updating block_for_insn appropriately.  SIMPLE_P is
   the same as in gen_return_pattern and passed to it.  */

static void
emit_return_into_block (bool simple_p, basic_block bb)
{
  rtx jump, pat;
  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
  pat = PATTERN (jump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  gcc_assert (ANY_RETURN_P (pat));
  JUMP_LABEL (jump) = pat;
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
#ifdef HAVE_simple_return
/* Create a copy of BB instructions and insert at BEFORE.  Redirect
   preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE.  */
static void
dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
                        bitmap_head *need_prologue)
{
  edge_iterator ei;
  edge e;
  rtx insn = BB_END (bb);

  /* We know BB has a single successor, so there is no need to copy a
     simple jump at the end of BB.  */
  if (simplejump_p (insn))
    insn = PREV_INSN (insn);

  start_sequence ();
  duplicate_insn_chain (BB_HEAD (bb), insn);
  if (dump_file)
    {
      unsigned count = 0;
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          ++count;
      fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
               bb->index, copy_bb->index, count);
    }
  insn = get_insns ();
  end_sequence ();
  emit_insn_before (insn, before);

  /* Redirect all the paths that need no prologue into copy_bb.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    if (!bitmap_bit_p (need_prologue, e->src->index))
      {
        int freq = EDGE_FREQUENCY (e);
        copy_bb->count += e->count;
        copy_bb->frequency += EDGE_FREQUENCY (e);
        e->dest->count -= e->count;
        if (e->dest->count < 0)
          e->dest->count = 0;
        e->dest->frequency -= freq;
        if (e->dest->frequency < 0)
          e->dest->frequency = 0;
        redirect_edge_and_branch_force (e, copy_bb);
        continue;
      }
    else
      ei_next (&ei);
}
#endif
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL.  */
static bool
active_insn_between (rtx head, rtx tail)
{
  while (tail)
    {
      if (active_insn_p (tail))
        return true;
      if (tail == head)
        return false;
      tail = PREV_INSN (tail);
    }
  return false;
}
/* LAST_BB is a block that exits, and empty of active instructions.
   Examine its predecessors for jumps that can be converted to
   (conditional) returns.  */
static vec<edge>
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
                          vec<edge> unconverted ATTRIBUTE_UNUSED)
{
  int i;
  basic_block bb;
  rtx label;
  edge_iterator ei;
  edge e;
  auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));

  FOR_EACH_EDGE (e, ei, last_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
      src_bbs.quick_push (e->src);

  label = BB_HEAD (last_bb);

  FOR_EACH_VEC_ELT (src_bbs, i, bb)
    {
      rtx jump = BB_END (bb);

      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
        continue;

      e = find_edge (bb, last_bb);

      /* If we have an unconditional jump, we can replace that
         with a simple return instruction.  */
      if (simplejump_p (jump))
        {
          /* The use of the return register might be present in the exit
             fallthru block.  Either:
             - removing the use is safe, and we should remove the use in
               the exit fallthru block, or
             - removing the use is not safe, and we should add it here.
             For now, we conservatively choose the latter.  Either of the
             2 helps in crossjumping.  */
          emit_use_return_register_into_block (bb);
          emit_return_into_block (simple_p, bb);
          delete_insn (jump);
        }

      /* If we have a conditional jump branching to the last
         block, we can try to replace that with a conditional
         return instruction.  */
      else if (condjump_p (jump))
        {
          rtx dest;

          if (simple_p)
            dest = simple_return_rtx;
          else
            dest = ret_rtx;
          if (!redirect_jump (jump, dest, 0))
            {
#ifdef HAVE_simple_return
              if (simple_p)
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Failed to redirect bb %d branch.\n", bb->index);
                  unconverted.safe_push (e);
                }
#endif
              continue;
            }

          /* See comment in simplejump_p case above.  */
          emit_use_return_register_into_block (bb);

          /* If this block has only one successor, it both jumps
             and falls through to the fallthru block, so we can't
             delete the edge.  */
          if (single_succ_p (bb))
            continue;
        }
      else
        {
#ifdef HAVE_simple_return
          if (simple_p)
            {
              if (dump_file)
                fprintf (dump_file,
                         "Failed to redirect bb %d branch.\n", bb->index);
              unconverted.safe_push (e);
            }
#endif
          continue;
        }

      /* Fix up the CFG for the successful change we just made.  */
      redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
      e->flags &= ~EDGE_CROSSING;
    }
  return unconverted;
}
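
/* For example (illustrative only): a predecessor ending in an
   unconditional jump to an exit block that is empty of active insns,

       jmp .L_ret      ...      .L_ret: ret

   is rewritten so the predecessor ends in a (simple_)return itself,
   and its edge is redirected straight to the exit block.  */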
/* Emit a return insn for the exit fallthru block.  */
static basic_block
emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
{
  basic_block last_bb = exit_fallthru_edge->src;

  if (JUMP_P (BB_END (last_bb)))
    {
      last_bb = split_edge (exit_fallthru_edge);
      exit_fallthru_edge = single_succ_edge (last_bb);
    }
  emit_barrier_after (BB_END (last_bb));
  emit_return_into_block (simple_p, last_bb);
  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  return last_bb;
}
#endif
/* Per-basic-block information about frame pointer setting (fpset)
   insns, used by the frame pointer shrink-wrapping code below.  */
struct bb_fpset
{
  basic_block bb;

  /* There is a call in this bb and there is no fp def or use
     between the call and bb entry.  */
  bool has_orig_upexposed_fpset;
  /* If it is needed to insert an fpset on the entry of bb,
     we tend to insert it before the original call.  */
  rtx place_for_upexposed_fpset;
  /* There is a call in this bb, it is not upward exposed in the bb and
     there is no fp def and use between the call and bb exit.  */
  bool has_orig_downexposed_fpset;

  /* fpset from bbs below could be moved across the current bb
     to its dominator.  There are two cases when transparent is
     false:
     1. There is any fp reference in the current bb.
     2. fp is in the live-out set of the current bb.  */
  bool transparent;

  /* There is any fp reference in the current bb.  Only for dumping.  */
  bool has_fp_ref;

  /* If there is an fp setting insn moved to here from another bb.  */
  bool moved_to_here;

  /* If an fp setting insn is to be moved here, insert it
     in the bb as early as possible.  place_to_move is the place
     to move to.  It is trying not to break the macrofusion of
     cmp and jmp insns.  */
  rtx place_to_move;

  /* The target bb set the fp setting in the current bb could move
     to, i.e., any path between a target bb and the current bb has no
     fp reference.  */
  sbitmap can_move_to;

  /* The list connecting child bbs having upward exposed fp
     settings which are possible promotion candidates.  */
  struct bb_fpset *next, *end;
};

/* The object containing information about fpsetting insns inside
   a bb.  fp setting insns are those insns saving the frame address to
   the frame pointer register.  */
static struct bb_fpset *bb_fpsets;
/* Dump the link list connecting all the promotion candidates below
   FPSET.  */
DEBUG_FUNCTION static void
dump_fpset_list (FILE *file, struct bb_fpset *fpset)
{
  struct bb_fpset *subnode;

  fprintf (file, "bb%d contains link list:\n", fpset->bb->index);
  subnode = fpset->next;
  while (subnode)
    {
      fprintf (file, "-> bb%d", subnode->bb->index);
#ifdef ENABLE_CHECKING
      gcc_assert (subnode->end == NULL);
      if (!subnode->next)
        gcc_assert (fpset->end == subnode);
#endif
      subnode = subnode->next;
    }
  fprintf (file, "\n");
}
/* For a link list, only the starting node has non-NULL next and end fields.
   The end field should point to the last node in the link list.  For the
   nodes except the starting one, their end field should be NULL.  */
DEBUG_FUNCTION static void
fpset_list_sanity_check (struct bb_fpset *fpset)
{
  struct bb_fpset *subnode;
  subnode = fpset->next;
  if (!subnode)
    gcc_assert (!fpset->end);
  while (subnode)
    {
      gcc_assert (subnode->end == NULL);
      if (!subnode->next)
        gcc_assert (fpset->end == subnode);
      subnode = subnode->next;
    }
}
/* Dump the status of shrinkwrapping contained in FPSET.  */
static void
dump_bb_fpset (FILE *file, struct bb_fpset *fpset)
{
  /* The -1 fallbacks below are a reconstruction; the original default
     values for the missing places were elided.  */
  int place_for_upexposed_fpset = fpset->place_for_upexposed_fpset
                                  ? INSN_UID (fpset->place_for_upexposed_fpset)
                                  : -1;
  int place_to_move = fpset->place_to_move ? INSN_UID (fpset->place_to_move)
                                           : -1;
  fprintf (file,
           "bb[%d]: upexposed_fpset: %d, place for upexposed_fpset: %d\n"
           "  downexposed_fpset: %d, transparent: %d\n"
           "  moved_to_here: %d, has_fp_ref: %d\n"
           "  place_to_move: %d\n",
           fpset->bb->index, fpset->has_orig_upexposed_fpset,
           place_for_upexposed_fpset,
           fpset->has_orig_downexposed_fpset, fpset->transparent,
           fpset->moved_to_here, fpset->has_fp_ref, place_to_move);
  dump_bitmap_file (file, fpset->can_move_to);
}
/* For each bb, initialize the bb_fpset structure for it.  struct bb_fpset
   is the central data structure used in shrinkwrapping.  LOCAL_INSERTS
   record the places where we need a fp setting locally inside a bb
   without the need to move it.  CALLS are the call insns needing a
   reg use note at the end of shrinkwrapping.  The reg use notes are
   used to make sure DCE does not delete inserted fp settings.  */
static void
bb_fpset_local_init (vec<rtx> *local_inserts, vec<rtx> *calls)
{
  basic_block bb;
  rtx insn, insert_before = NULL;
  rtx place_to_move = NULL;
  bool place_to_move_set = false;

  if (dump_file)
    fprintf (dump_file, "\nfpset dump after init:\n");

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb_fpsets[bb->index].bb = bb;
      bb_fpsets[bb->index].can_move_to =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_fpsets[bb->index].can_move_to);
      bb_fpsets[bb->index].transparent = true;
      insert_before = NULL;
      place_to_move = NULL;
      place_to_move_set = false;

      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          bool fp_def_use = false;

          if (CALL_P (insn))
            {
              /* Check whether fp is explicitly used in call target
                 or elsewhere in the call pattern.  */
              HARD_REG_SET used;
              CLEAR_HARD_REG_SET (used);
              note_uses (&PATTERN (insn), record_hard_reg_uses,
                         &used);
              if (TEST_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM))
                fp_def_use = true;
              calls->safe_push (insn);
            }

          /* fp is in DF_INSN_DEFS or DF_INSN_USES of call insn in
             order not to move other normal fp def/use insn across call.
             For fp setting movement, those DEFS and USES should be
             ignored.  */
          if (NONDEBUG_INSN_P (insn) && !CALL_P (insn))
            {
              df_ref *df_rec;
              for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
                {
                  rtx reg = DF_REF_REG (*df_rec);
                  if (REG_P (reg)
                      && REGNO (reg) == HARD_FRAME_POINTER_REGNUM)
                    {
                      fp_def_use = true;
                      break;
                    }
                }
              for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
                {
                  rtx reg = DF_REF_REG (*df_rec);
                  if (REG_P (reg)
                      && REGNO (reg) == HARD_FRAME_POINTER_REGNUM)
                    {
                      fp_def_use = true;
                      break;
                    }
                }
            }

          if (fp_def_use)
            {
              /* Record place_to_move when the bottom fp reference is seen.  */
              if (!place_to_move_set)
                {
                  bb_fpsets[bb->index].place_to_move = place_to_move;
                  place_to_move_set = true;
                }

              if (insert_before)
                {
                  /* When fp_def_use is seen, fpset should be inserted for
                     the last seen call.  Add the call to local_inserts.  */
                  local_inserts->safe_push (insert_before);
                  insert_before = NULL;
                }
              bb_fpsets[bb->index].transparent = false;
              bb_fpsets[bb->index].has_fp_ref = true;
            }

          /* Record the insn just below the bottom fp reference in the bb.
             If there is fpset moved from other bbs, the fpset could be placed
             before place_to_move.  */
          if (!place_to_move_set && NONDEBUG_INSN_P (insn))
            place_to_move = insn;

          if (CALL_P (insn))
            {
              /* The latest place to insert fp setting.  */
              insert_before = insn;
              /* No fp def or use below this call, so we have downward exposed
                 fpset.  */
              if (!bb_fpsets[bb->index].has_fp_ref)
                bb_fpsets[bb->index].has_orig_downexposed_fpset = true;
            }
        }

      /* If there is no fp_def_use in bb, place_to_move will be the first
         insn in the bb.  Before place_to_move is the place to insert fpset
         moved from other bbs.  */
      if (!place_to_move_set)
        bb_fpsets[bb->index].place_to_move = place_to_move;

      if (insert_before)
        {
          bb_fpsets[bb->index].has_orig_upexposed_fpset = true;
          bb_fpsets[bb->index].place_for_upexposed_fpset = insert_before;
        }

      /* For a bb without explicit fp def/use, but which is in an fp live
         range, it is impossible to move fpset to this bb or across this
         bb.  */
      if (bitmap_bit_p (df_get_live_out (bb), HARD_FRAME_POINTER_REGNUM))
        {
          bb_fpsets[bb->index].transparent = false;
          bb_fpsets[bb->index].place_to_move = NULL;
        }

      if (dump_file)
        dump_bb_fpset (dump_file, &bb_fpsets[bb->index]);
    }
}
/* Set bb_fpsets[bb->index].can_move_to, which indicates the bb set to
   which fp setting from bb could be moved.
   Suppose there is a fp setting in BB1; if there is no non-transparent
   BB on any path from BB2 to BB1, BB2 is a BB that the fp setting in
   BB1 could be moved to.

   The dataflow equation is like this:
   Initialization before dataflow iterating:
     For the entry bb, availout[bb] = {0, 0, ...}
     For any other bb, availout[bb] = {1, 1, ...}
   During the iterating:
     availin[bb] = AND (availout[pred])
     availout[bb] = availloc OR (availin[bb] AND trans)

   After the iterating completes, availin[bb] contains the bitmap of the
   target bbs, which the fp insns in the bb are allowed to move to.  */

static void
compute_can_move_to (void)
{
  basic_block bb;
  int bb_index;
  sbitmap *availin, *availout, availloc, trans;
  vec<basic_block> worklist;

  availloc = sbitmap_alloc (last_basic_block_for_fn (cfun));
  trans = sbitmap_alloc (last_basic_block_for_fn (cfun));
  availin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				  last_basic_block_for_fn (cfun));
  availout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				   last_basic_block_for_fn (cfun));
  bitmap_clear (trans);
  bitmap_vector_clear (availin, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (availout, last_basic_block_for_fn (cfun));
  worklist.create (0);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->aux = bb;
      worklist.safe_push (bb);
      bitmap_ones (availout[bb->index]);
    }

  while (!worklist.is_empty ())
    {
      edge e;
      edge_iterator ei;

      bb = worklist.pop ();
      bb->aux = NULL;
      bb_index = bb->index;

      bitmap_intersection_of_preds (availin[bb_index], availout, bb);

      if (!bb_fpsets[bb_index].transparent)
	bitmap_clear (trans);
      else
	bitmap_ones (trans);

      bitmap_clear (availloc);
      /* If no place_to_move, it is impossible for fpsets from
	 other bbs to be moved to here.  An example of place_to_move
	 being NULL is when fp is in the live out set of bb.  */
      if (bb_fpsets[bb_index].place_to_move)
	bitmap_set_bit (availloc, bb_index);

      if (bitmap_or_and (availout[bb_index], availloc,
			 availin[bb_index], trans))
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && !e->dest->aux)
	    {
	      e->dest->aux = e->dest;
	      worklist.safe_insert (0, e->dest);
	    }
    }

  FOR_EACH_BB_FN (bb, cfun)
    bitmap_copy (bb_fpsets[bb->index].can_move_to, availin[bb->index]);

  clear_aux_for_blocks ();
  sbitmap_free (availloc);
  sbitmap_free (trans);
  sbitmap_vector_free (availin);
  sbitmap_vector_free (availout);
  worklist.release ();
}
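
/* An illustrative trace of the equations above on a diamond CFG
   (example blocks, not from the original sources):
   BB1 -> BB2, BB1 -> BB3, BB2 -> BB4, BB3 -> BB4, where only BB3 is
   non-transparent and every bb has a place_to_move:

     availout[BB1] = {BB1}
     availin [BB2] = {BB1}            availin [BB3] = {BB1}
     availout[BB2] = {BB1, BB2}       availout[BB3] = {BB3}
     availin [BB4] = availout[BB2] AND availout[BB3] = {}

   So a fp setting in BB2 may be hoisted into BB1, but a fp setting in
   BB4 cannot leave BB4: a copy placed in BB1 would also have to be
   valid on the path through the non-transparent BB3.  */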
class promote_fpset_dom_walker : public dom_walker
{
public:
  promote_fpset_dom_walker (cdi_direction direction)
    : dom_walker (direction) {}
  virtual void after_dom_children (basic_block);
};

/* Try to promote fp settings from the bbs in the dominator tree
   to its root if the promote_cost is less than non_promote_cost.
   If moving fp settings to BBi is decided, bb_fpsets[i].moved_to_here
   will be set to true.  */

void
promote_fpset_dom_walker::after_dom_children (basic_block bb ATTRIBUTE_UNUSED)
{
  vec<basic_block> children;
  basic_block child;
  unsigned j;
  int non_promote_cost, promote_cost;
  int bb_index = bb->index;
  float promote_fraction;

  /* For entry bb, there is nothing to promote.  */
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  /* For leaf bb, there is nothing to promote.  */
  children = get_dominated_by (CDI_DOMINATORS, bb);
  if (children.is_empty ())
    {
      children.release ();
      return;
    }

  if (bb_fpsets[bb_index].has_orig_downexposed_fpset
      || (bb_fpsets[bb_index].has_orig_upexposed_fpset
	  && bb_fpsets[bb_index].transparent))
    non_promote_cost = bb->frequency * 10 + (!bb->frequency ? 1 : 0);
  else
    non_promote_cost = 0;
  promote_cost = bb->frequency * 10 + (!bb->frequency ? 1 : 0);

  /* Compute promote_cost and non_promote_cost of all the bbs dominated
     by bb.  */
  FOR_EACH_VEC_ELT (children, j, child)
    {
      int index = child->index;
      struct bb_fpset *fpset = &bb_fpsets[index];

      /* In the following case, fpset cannot be promoted to
	 bb anyway.  No need to involve the child into cost
	 computation.  */
      if ((!fpset->transparent && !fpset->has_orig_upexposed_fpset)
	  || !bitmap_bit_p (fpset->can_move_to, bb_index))
	continue;

      /* If subtree computation didn't choose to move fp setting to
	 the child bb.  */
      if (!fpset->moved_to_here)
	{
	  struct bb_fpset *subnode = fpset->next;

	  /* For leaf node on dom tree, its moved_to_here is false,
	     but it may has_orig_upexposed_fpset.
	     For non-leaf node, suppose has_orig_upexposed_fpset is
	     true.  If it is non-transparent, we only have a fpset upward
	     exposed.  If it is transparent, moved_to_here has to be true
	     and the program will not take this branch.  */
	  if (fpset->has_orig_upexposed_fpset)
	    non_promote_cost += child->frequency * 10
				+ (!child->frequency ? 1 : 0);

	  while (subnode)
	    {
	      non_promote_cost += subnode->bb->frequency * 10
				  + (!subnode->bb->frequency ? 1 : 0);
	      subnode = subnode->next;
	    }
	}
      else
	non_promote_cost += child->frequency * 10
			    + (!child->frequency ? 1 : 0);
    }

  /* If promote is beneficial, mark that a fp setting is to be moved to
     the current bb, and remove upexposed fp settings or the fp setting
     link list from its children.
     If promote will cost too much, don't move fp settings to the current
     bb, but connect those scattered fp settings with a link list in
     the bb_fpset of the current bb.

     If not-to-promote is chosen, there could be multiple fp setting
     insns scattered instead of one fp setting insn merged.  It means
     larger code size.  Introduce PARAM_FPSET_PROMOTE_FRACTION to find
     a balance between performance improvement and code size increase.  */

  if (profile_status_for_fn (cfun) == PROFILE_READ)
    promote_fraction = 1;
  else
    promote_fraction = 1 + PARAM_VALUE (PARAM_FPSET_PROMOTE_FRACTION) * 0.1;

  if ((promote_cost <= (non_promote_cost * promote_fraction)
       && bb_fpsets[bb_index].place_to_move)
      || bb_fpsets[bb_index].has_orig_downexposed_fpset)
    {
      /* Move fp set to bb.  */
      FOR_EACH_VEC_ELT (children, j, child)
	{
	  int index = child->index;
	  struct bb_fpset *fpset = &bb_fpsets[index];
	  struct bb_fpset *subnode = fpset->next;

	  /* In the following case, fpsets cannot be promoted to
	     bb.  Leave those fpsets unchanged.  */
	  if ((!fpset->transparent && !fpset->has_orig_upexposed_fpset)
	      || !bitmap_bit_p (fpset->can_move_to, bb_index))
	    continue;

	  while (subnode)
	    {
	      struct bb_fpset *next = subnode->next;
	      /* Reset the subnode.  */
	      subnode->next = subnode->end = NULL;
	      subnode->moved_to_here = false;
	      subnode->has_orig_upexposed_fpset = false;
	      subnode = next;
	    }
	  fpset->next = fpset->end = NULL;
	  fpset->moved_to_here = false;
	  fpset->has_orig_upexposed_fpset = false;
	}

      /* If bb has_orig_downexposed_fpset, there will already be a
	 fpset inserted by local_inserts, so don't set moved_to_here.  */
      if (!bb_fpsets[bb_index].has_orig_downexposed_fpset)
	bb_fpsets[bb_index].moved_to_here = true;
    }
  else
    {
      /* Connect all the link lists from children and set
	 bb_fpsets[bb_index].next to the head of it.  */
      FOR_EACH_VEC_ELT (children, j, child)
	{
	  int index = child->index;
	  struct bb_fpset *fpset = &bb_fpsets[index];

	  if ((!fpset->transparent && !fpset->has_orig_upexposed_fpset)
	      || !bitmap_bit_p (fpset->can_move_to, bb_index))
	    continue;

	  if ((fpset->moved_to_here && fpset->transparent)
	      || fpset->has_orig_upexposed_fpset)
	    {
	      /* Only the child has to be added to the bb's link list.  */
	      struct bb_fpset *tmp;
	      tmp = bb_fpsets[bb_index].next;
	      bb_fpsets[bb_index].next = fpset;
	      fpset->next = tmp;
	      if (!bb_fpsets[bb_index].end)
		bb_fpsets[bb_index].end = fpset;
#ifdef ENABLE_CHECKING
	      fpset_list_sanity_check (&bb_fpsets[bb_index]);
#endif
	    }
	  else if (!fpset->moved_to_here)
	    {
	      /* The link list of the child should be added to the
		 bb's link list.  */

	      /* If fpset contains an empty link list, nothing has
		 to be done.  */
	      if (!fpset->next)
		{
		  gcc_assert (!fpset->end);
		  continue;
		}

	      fpset->end->next = bb_fpsets[bb_index].next;
	      bb_fpsets[bb_index].next = fpset->next;
	      if (!bb_fpsets[bb_index].end)
		bb_fpsets[bb_index].end = fpset->end;
	      fpset->next = fpset->end = NULL;
#ifdef ENABLE_CHECKING
	      fpset_list_sanity_check (&bb_fpsets[bb_index]);
#endif
	    }
	}
    }

  children.release ();
}
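
/* A numeric illustration of the cost test above (frequencies invented
   for the example).  Each fp setting costs "frequency * 10", or 1 when
   the frequency is 0.  A parent bb with frequency 4 weighing two
   children with frequencies 3 and 2 computes

     promote_cost     = 4 * 10          = 40
     non_promote_cost = 3 * 10 + 2 * 10 = 50

   and with PARAM_FPSET_PROMOTE_FRACTION = 2, promote_fraction is 1.2,
   so the test "40 <= 50 * 1.2" succeeds and the two scattered fp
   settings are merged into one in the parent.  With real profile data
   (PROFILE_READ) the fraction is 1 and only strict wins are taken.  */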
/* Choose places to move fp settings.  fp settings will be moved
   to places before insns saved in GLOBAL_INSERTS.  */

static void
choose_places (vec<rtx> *global_inserts)
{
  basic_block bb;
  basic_block prologue_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  bool prologue_bb_trans = bb_fpsets[prologue_bb->index].transparent;

  fpset_needed_in_prologue = false;

  /* Compute the set of bbs each fp setting is allowed to move to.  */
  compute_can_move_to ();

  /* Iterate dominator tree from bottom to top and try to move
     fp settings upwards.  */
  calculate_dominance_info (CDI_DOMINATORS);
  promote_fpset_dom_walker (CDI_DOMINATORS)
    .walk (cfun->cfg->x_entry_block_ptr);
  free_dominance_info (CDI_DOMINATORS);

  if (dump_file)
    {
      fprintf (dump_file, "\nfpset dump after choose places:\n");
      FOR_EACH_BB_FN (bb, cfun)
	dump_bb_fpset (dump_file, &bb_fpsets[bb->index]);
    }

  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* Save the places to move fp setting in global_inserts vector.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      bool postponed_to_prologue = false;
      struct bb_fpset *fpset = &bb_fpsets[bb->index];

      /* If a fp setting insn needs to be inserted in a bb equivalent with
	 prologue bb on dominator tree, and the prologue bb is transparent,
	 and prologue bb is in the can_move_to set of the target bb, replace
	 the fp setting insn with "mov sp fp" in prologue.  This is to reduce
	 code size.  It will postpone the insertion of the fp setting insn
	 until prologue generation.  */
      if (prologue_bb_trans
	  && (bb == prologue_bb
	      || (dominated_by_p (CDI_POST_DOMINATORS, prologue_bb, bb)
		  && bitmap_bit_p (fpset->can_move_to,
				   prologue_bb->index))))
	postponed_to_prologue = true;

      if (fpset->moved_to_here && !fpset->transparent)
	{
	  gcc_assert (fpset->place_to_move);
	  global_inserts->safe_push (fpset->place_to_move);
	}
      else if (fpset->moved_to_here && fpset->transparent
	       && !fpset->has_orig_upexposed_fpset)
	{
	  gcc_assert (fpset->place_to_move);
	  if (postponed_to_prologue)
	    fpset_needed_in_prologue |= postponed_to_prologue;
	  else
	    global_inserts->safe_push (fpset->place_to_move);
	}

      if (fpset->has_orig_upexposed_fpset)
	{
	  if (postponed_to_prologue)
	    fpset_needed_in_prologue |= postponed_to_prologue;
	  else
	    global_inserts->safe_push (fpset->place_for_upexposed_fpset);
	}
    }
  free_dominance_info (CDI_POST_DOMINATORS);
}
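
/* Illustration of the postponement above (hypothetical layout, not
   from the original sources): when the prologue bb is transparent and
   the chosen insertion bb post-dominates it, both bbs execute the same
   number of times, so instead of emitting a separate fp setting insn,
   fpset_needed_in_prologue asks the prologue itself to materialize the
   frame pointer, which is one insn smaller.  */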
/* Insert frame pointer setting insns before the places specified
   in LOCAL_INSERTS and GLOBAL_INSERTS.  */

static void
insert_fp_setting (vec<rtx> *local_inserts, vec<rtx> *global_inserts,
		   vec<rtx> *calls)
{
  bool any_insert = false;
  rtx seq_end, seq_start, dup, insert_before;

  /* Generate the fp setting sequence once; it is duplicated for every
     insertion point.  */
  start_sequence ();
  targetm.set_fp_insn ();
  seq_start = get_insns ();
  seq_end = get_last_insn ();
  end_sequence ();

  while (!local_inserts->is_empty ())
    {
      insert_before = local_inserts->pop ();
      if (dbg_cnt (fpset_insert))
	{
	  dup = duplicate_insn_chain (seq_start, seq_end);
	  emit_insn_before (dup, insert_before);
	  any_insert = true;
	}
    }

  while (!global_inserts->is_empty ())
    {
      insert_before = global_inserts->pop ();
      if (dbg_cnt (fpset_insert))
	{
	  dup = duplicate_insn_chain (seq_start, seq_end);
	  emit_insn_before (dup, insert_before);
	  any_insert = true;
	}
    }

  /* If there is any fp setting inserted, add a fp use note in every
     call.  This keeps DCE from deleting the fp settings inserted
     here.  */
  if (any_insert || fpset_needed_in_prologue)
    while (!calls->is_empty ())
      {
	rtx *call_fusage, call;

	call = calls->pop ();
	call_fusage = &CALL_INSN_FUNCTION_USAGE (call);
	use_reg_mode (call_fusage, hard_frame_pointer_rtx,
		      GET_MODE (hard_frame_pointer_rtx));
	CALL_INSN_FUNCTION_USAGE (call) = *call_fusage;
      }
}
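
/* For reference, after use_reg_mode above each recorded call carries a
   (use ...) entry in its CALL_INSN_FUNCTION_USAGE chain, e.g. roughly

     (expr_list (use (reg:DI 6 bp)) ...)

   on x86-64 (illustrative RTL; register and mode are target-specific),
   which makes the frame pointer live at the call so DCE keeps the
   inserted fp settings.  */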
/* FP shrinkwrapping.
   Initially it is assumed that every call needs a fp setting
   immediately before it to save the caller's frame address.  fp
   shrinkwrapping will try to promote those fp settings from their
   original bbs to their dominators if it is legal and beneficial.
   The promotion process is a bottom-up traversal of the dominator
   tree.  */

static void
move_fp_settings (void)
{
  /* local_inserts save places to insert local fp settings which
     could not be promoted outside their original bb.  global_inserts
     save places to insert the rest of the fp settings.  calls save call
     insns for which we need to add a reg use note.  */
  vec<rtx> local_inserts, global_inserts, calls;

  local_inserts.create (0);
  global_inserts.create (0);
  calls.create (0);

  bb_fpsets = XNEWVEC (struct bb_fpset, last_basic_block_for_fn (cfun));
  memset (&bb_fpsets[0], 0,
	  sizeof (struct bb_fpset) * last_basic_block_for_fn (cfun));

  bb_fpset_local_init (&local_inserts, &calls);
  choose_places (&global_inserts);
  insert_fp_setting (&local_inserts, &global_inserts, &calls);

  /* After we insert fp setting and do fp shrinkwrapping, fp will not be
     invalidated by call implicitly.  */
  if (frame_pointer_partially_needed)
    CLEAR_HARD_REG_BIT (regs_invalidated_by_call, HARD_FRAME_POINTER_REGNUM);

  local_inserts.release ();
  global_inserts.release ();
  calls.release ();
  free (bb_fpsets);
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
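
/* A small example of the strategy described above (illustrative CFG,
   not from the original sources):

       BB1 (entry test, needs no frame)
       /                \
     BB2 (early return)  BB3 (clobbers a call-saved reg)
                          |
                         BB4 -> exit

   BB3 requires the prologue, so BB3 and everything reachable from it
   (BB4) are marked; BB1 and BB2 are not in ANTIC.  The only edge from
   a non-ANTIC block into ANTIC is BB1->BB3, so the prologue is placed
   there and the early-return path BB1->BB2 runs without it, BB2 ending
   in a simple_return.  */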
static void
thread_prologue_and_epilogue_insns (void)
{
  bool inserted;
#ifdef HAVE_simple_return
  vec<edge> unconverted_simple_returns = vNULL;
  bool nonempty_prologue;
  bitmap_head bb_flags;
  unsigned max_grow_size;
#endif
  rtx returnjump;
  rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
  rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
  edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
  edge_iterator ei;

  df_analyze ();

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  inserted = false;
  seq = NULL_RTX;
  epilogue_end = NULL_RTX;
  returnjump = NULL_RTX;

  /* Can't deal with multiple successors of the entry block at the
     moment.  Function should always have at least one entry
     point.  */
  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  orig_entry_edge = entry_edge;

  split_prologue_seq = NULL_RTX;
  if (flag_split_stack
      && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
	  == NULL))
    {
#ifndef HAVE_split_stack_prologue
      gcc_unreachable ();
#else
      gcc_assert (HAVE_split_stack_prologue);

      start_sequence ();
      emit_insn (gen_split_stack_prologue ());
      split_prologue_seq = get_insns ();
      end_sequence ();

      record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
      set_insn_locations (split_prologue_seq, prologue_location);
#endif
    }

  /* Insert frame pointer setting insns before calls.  */
  if (frame_pointer_partially_needed)
    move_fp_settings ();

  prologue_seq = NULL_RTX;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
	 if the profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
	emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, NULL, &prologue_insn_hash);
      emit_note (NOTE_INSN_PROLOGUE_END);

      /* Ensure that instructions are not moved into the prologue when
	 profiling is on.  The call to the profiling routine can be
	 emitted within the live range of a call-clobbered register.  */
      if (!targetm.profile_before_prologue () && crtl->profile)
	emit_insn (gen_blockage ());

      prologue_seq = get_insns ();
      end_sequence ();
      set_insn_locations (prologue_seq, prologue_location);
    }
#endif

#ifdef HAVE_simple_return
  bitmap_initialize (&bb_flags, &bitmap_default_obstack);

  /* Try to perform a kind of shrink-wrapping, making sure the
     prologue/epilogue is emitted only around those parts of the
     function that require it.  */

  nonempty_prologue = false;
  for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
    if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
      {
	nonempty_prologue = true;
	break;
      }

  if (flag_shrink_wrap && HAVE_simple_return
      && (targetm.profile_before_prologue () || !crtl->profile)
      && nonempty_prologue && !crtl->calls_eh_return)
    {
      HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
      struct hard_reg_set_container set_up_by_prologue;
      rtx p_insn;
      vec<basic_block> vec;
      basic_block bb;
      bitmap_head bb_antic_flags;
      bitmap_head bb_on_list;
      bitmap_head bb_tail;

      if (dump_file)
	fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");

      /* Compute the registers set and used in the prologue.  */
      CLEAR_HARD_REG_SET (prologue_clobbered);
      CLEAR_HARD_REG_SET (prologue_used);
      for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
	{
	  HARD_REG_SET this_used;
	  if (!NONDEBUG_INSN_P (p_insn))
	    continue;

	  CLEAR_HARD_REG_SET (this_used);
	  note_uses (&PATTERN (p_insn), record_hard_reg_uses,
		     &this_used);
	  AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
	  IOR_HARD_REG_SET (prologue_used, this_used);
	  note_stores (PATTERN (p_insn), record_hard_reg_sets,
		       &prologue_clobbered);
	}

      prepare_shrink_wrap (entry_edge->dest);

      bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
      bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
      bitmap_initialize (&bb_tail, &bitmap_default_obstack);

      /* Find the set of basic blocks that require a stack frame,
	 and blocks that are too big to be duplicated.  */

      vec.create (n_basic_blocks_for_fn (cfun));

      CLEAR_HARD_REG_SET (set_up_by_prologue.set);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			   STACK_POINTER_REGNUM);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
      if (frame_pointer_needed || frame_pointer_partially_needed)
	add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			     HARD_FRAME_POINTER_REGNUM);
      if (pic_offset_table_rtx)
	add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			     PIC_OFFSET_TABLE_REGNUM);
      if (crtl->drap_reg)
	add_to_hard_reg_set (&set_up_by_prologue.set,
			     GET_MODE (crtl->drap_reg),
			     REGNO (crtl->drap_reg));
      if (targetm.set_up_by_prologue)
	targetm.set_up_by_prologue (&set_up_by_prologue);

      /* We don't use a different max size depending on
	 optimize_bb_for_speed_p because increasing shrink-wrapping
	 opportunities by duplicating tail blocks can actually result
	 in an overall decrease in code size.  */
      max_grow_size = get_uncond_jump_length ();
      max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  rtx insn;
	  unsigned size = 0;

	  FOR_BB_INSNS (bb, insn)
	    if (NONDEBUG_INSN_P (insn))
	      {
		if (requires_stack_frame_p (insn, prologue_used,
					    set_up_by_prologue.set))
		  {
		    if (bb == entry_edge->dest)
		      goto fail_shrinkwrap;
		    bitmap_set_bit (&bb_flags, bb->index);
		    vec.quick_push (bb);
		    break;
		  }
		else if (size <= max_grow_size)
		  {
		    size += get_attr_min_length (insn);
		    if (size > max_grow_size)
		      bitmap_set_bit (&bb_on_list, bb->index);
		  }
	      }
	}

      /* Blocks that really need a prologue, or are too big for tails.  */
      bitmap_ior_into (&bb_on_list, &bb_flags);

      /* For every basic block that needs a prologue, mark all blocks
	 reachable from it, so as to ensure they are also seen as
	 requiring a prologue.  */
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();

	  FOR_EACH_EDGE (e, ei, tmp_bb->succs)
	    if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		&& bitmap_set_bit (&bb_flags, e->dest->index))
	      vec.quick_push (e->dest);
	}

      /* Find the set of basic blocks that need no prologue, have a
	 single successor, can be duplicated, meet a max size
	 requirement, and go to the exit via like blocks.  */
      vec.quick_push (EXIT_BLOCK_PTR_FOR_FN (cfun));
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();

	  FOR_EACH_EDGE (e, ei, tmp_bb->preds)
	    if (single_succ_p (e->src)
		&& !bitmap_bit_p (&bb_on_list, e->src->index)
		&& can_duplicate_block_p (e->src))
	      {
		edge pe;
		edge_iterator pei;

		/* If there is predecessor of e->src which doesn't
		   need prologue and the edge is complex,
		   we might not be able to redirect the branch
		   to a copy of e->src.  */
		FOR_EACH_EDGE (pe, pei, e->src->preds)
		  if ((pe->flags & EDGE_COMPLEX) != 0
		      && !bitmap_bit_p (&bb_flags, pe->src->index))
		    break;
		if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
		  vec.quick_push (e->src);
	      }
	}

      /* Now walk backwards from every block that is marked as needing
	 a prologue to compute the bb_antic_flags bitmap.  Exclude
	 tail blocks; they can be duplicated to be used on paths not
	 needing a prologue.  */
      bitmap_clear (&bb_on_list);
      bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
      FOR_EACH_BB_FN (bb, cfun)
	{
	  if (!bitmap_bit_p (&bb_antic_flags, bb->index))
	    continue;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
		&& bitmap_set_bit (&bb_on_list, e->src->index))
	      vec.quick_push (e->src);
	}
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();
	  bool all_set = true;

	  bitmap_clear_bit (&bb_on_list, tmp_bb->index);
	  FOR_EACH_EDGE (e, ei, tmp_bb->succs)
	    if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
	      {
		all_set = false;
		break;
	      }

	  if (all_set)
	    {
	      bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
	      FOR_EACH_EDGE (e, ei, tmp_bb->preds)
		if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
		    && bitmap_set_bit (&bb_on_list, e->src->index))
		  vec.quick_push (e->src);
	    }
	}
      /* Find exactly one edge that leads to a block in ANTIC from
	 a block that isn't.  */
      if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    if (!bitmap_bit_p (&bb_antic_flags, bb->index))
	      continue;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
		{
		  if (entry_edge != orig_entry_edge)
		    {
		      entry_edge = orig_entry_edge;
		      if (dump_file)
			fprintf (dump_file, "More than one candidate edge.\n");
		      goto fail_shrinkwrap;
		    }
		  if (dump_file)
		    fprintf (dump_file, "Found candidate edge for "
			     "shrink-wrapping, %d->%d.\n", e->src->index,
			     e->dest->index);
		  entry_edge = e;
		}
	  }

      if (entry_edge != orig_entry_edge)
	{
	  /* Test whether the prologue is known to clobber any register
	     (other than FP or SP) which are live on the edge.  */
	  CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
	  REG_SET_TO_HARD_REG_SET (live_on_edge,
				   df_get_live_in (entry_edge->dest));
	  if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
	    {
	      entry_edge = orig_entry_edge;
	      if (dump_file)
		fprintf (dump_file,
			 "Shrink-wrapping aborted due to clobber.\n");
	    }
	}
      if (entry_edge != orig_entry_edge)
	{
	  crtl->shrink_wrapped = true;
	  if (dump_file)
	    fprintf (dump_file, "Performing shrink-wrapping.\n");

	  /* Find tail blocks reachable from both blocks needing a
	     prologue and blocks not needing a prologue.  */
	  if (!bitmap_empty_p (&bb_tail))
	    FOR_EACH_BB_FN (bb, cfun)
	      {
		bool some_pro, some_no_pro;
		if (!bitmap_bit_p (&bb_tail, bb->index))
		  continue;
		some_pro = some_no_pro = false;
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (&bb_flags, e->src->index))
		      some_pro = true;
		    else
		      some_no_pro = true;
		  }
		if (some_pro && some_no_pro)
		  vec.quick_push (bb);
		else
		  bitmap_clear_bit (&bb_tail, bb->index);
	      }
	  /* Find the head of each tail.  */
	  while (!vec.is_empty ())
	    {
	      basic_block tbb = vec.pop ();

	      if (!bitmap_bit_p (&bb_tail, tbb->index))
		continue;

	      while (single_succ_p (tbb))
		{
		  tbb = single_succ (tbb);
		  bitmap_clear_bit (&bb_tail, tbb->index);
		}
	    }
	  /* Now duplicate the tails.  */
	  if (!bitmap_empty_p (&bb_tail))
	    FOR_EACH_BB_REVERSE_FN (bb, cfun)
	      {
		basic_block copy_bb, tbb;
		rtx insert_point;
		int eflags;

		if (!bitmap_clear_bit (&bb_tail, bb->index))
		  continue;

		/* Create a copy of BB, instructions and all, for
		   use on paths that don't need a prologue.
		   Ideal placement of the copy is on a fall-thru edge
		   or after a block that would jump to the copy.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  if (!bitmap_bit_p (&bb_flags, e->src->index)
		      && single_succ_p (e->src))
		    break;
		if (e)
		  {
		    /* Make sure we insert after any barriers.  */
		    rtx end = get_last_bb_insn (e->src);
		    copy_bb = create_basic_block (NEXT_INSN (end),
						  NULL_RTX, e->src);
		    BB_COPY_PARTITION (copy_bb, e->src);
		  }
		else
		  {
		    /* Otherwise put the copy at the end of the function.  */
		    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
						  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
		    BB_COPY_PARTITION (copy_bb, bb);
		  }

		insert_point = emit_note_after (NOTE_INSN_DELETED,
						BB_END (copy_bb));
		emit_barrier_after (BB_END (copy_bb));

		tbb = bb;
		while (1)
		  {
		    dup_block_and_redirect (tbb, copy_bb, insert_point,
					    &bb_flags);
		    tbb = single_succ (tbb);
		    if (tbb == EXIT_BLOCK_PTR_FOR_FN (cfun))
		      break;
		    e = split_block (copy_bb, PREV_INSN (insert_point));
		    copy_bb = e->dest;
		  }

		/* Quiet verify_flow_info by (ab)using EDGE_FAKE.
		   We have yet to add a simple_return to the tails,
		   as we'd like to first convert_jumps_to_returns in
		   case the block is no longer used after that.  */
		eflags = EDGE_FAKE;
		if (CALL_P (PREV_INSN (insert_point))
		    && SIBLING_CALL_P (PREV_INSN (insert_point)))
		  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
		make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
				       eflags);

		/* verify_flow_info doesn't like a note after a
		   sibling call.  */
		delete_insn (insert_point);
		if (bitmap_empty_p (&bb_tail))
		  break;
	      }

	fail_shrinkwrap:
	  bitmap_clear (&bb_tail);
	  bitmap_clear (&bb_antic_flags);
	  bitmap_clear (&bb_on_list);
	  vec.release ();
	}
    }
#endif

  if (split_prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
      inserted = true;
    }
  if (prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (prologue_seq, entry_edge);
      inserted = true;
    }

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);

  /* If we're allowed to generate a simple return instruction, then by
     definition we don't need a full epilogue.  If the last basic
     block before the exit block does not contain active instructions,
     examine its predecessors and try to emit (conditional) return
     instructions.  */
#ifdef HAVE_simple_return
  if (entry_edge != orig_entry_edge)
    {
      if (optimize)
	{
	  unsigned i, last;

	  /* convert_jumps_to_returns may add to preds of the exit block
	     (but won't remove).  Stop at end of current preds.  */
	  last = EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
	  for (i = 0; i < last; i++)
	    {
	      e = EDGE_I (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds, i);
	      if (LABEL_P (BB_HEAD (e->src))
		  && !bitmap_bit_p (&bb_flags, e->src->index)
		  && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
		unconverted_simple_returns
		  = convert_jumps_to_returns (e->src, true,
					      unconverted_simple_returns);
	    }
	}

      if (exit_fallthru_edge != NULL
	  && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
	  && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
	{
	  basic_block last_bb;

	  last_bb = emit_return_for_exit (exit_fallthru_edge, true);
	  returnjump = BB_END (last_bb);
	  exit_fallthru_edge = NULL;
	}
    }
#endif
#ifdef HAVE_return
  if (HAVE_return)
    {
      if (exit_fallthru_edge == NULL)
	goto epilogue_done;

      if (optimize)
	{
	  basic_block last_bb = exit_fallthru_edge->src;

	  if (LABEL_P (BB_HEAD (last_bb))
	      && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
	    convert_jumps_to_returns (last_bb, false, vNULL);

	  if (EDGE_COUNT (last_bb->preds) != 0
	      && single_succ_p (last_bb))
	    {
	      last_bb = emit_return_for_exit (exit_fallthru_edge, false);
	      epilogue_end = returnjump = BB_END (last_bb);
#ifdef HAVE_simple_return
	      /* Emitting the return may add a basic block.
		 Fix bb_flags for the added block.  */
	      if (last_bb != exit_fallthru_edge->src)
		bitmap_set_bit (&bb_flags, last_bb->index);
#endif
	      goto epilogue_done;
	    }
	}
    }
#endif

  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
	continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
	continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
	continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif

  /* If nothing falls through into the exit block, we don't need an
     epilogue.  */

  if (exit_fallthru_edge == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      if (seq)
	emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locations (seq, epilogue_location);

      seq = get_insns ();
      returnjump = get_last_insn ();
      end_sequence ();

      insert_insn_on_edge (seq, exit_fallthru_edge);
      inserted = true;

      if (JUMP_P (returnjump))
	set_return_jump_label (returnjump);
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB_FN (cur_bb, cfun)
	if (cur_bb->index >= NUM_FIXED_BLOCKS
	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	  cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }

epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
	 be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  if (((e->flags & EDGE_FALLTHRU) != 0)
	      && returnjump_p (BB_END (e->src)))
	    e->flags &= ~EDGE_FALLTHRU;
	}
    }

#ifdef HAVE_simple_return
  /* If there were branches to an empty LAST_BB which we tried to
     convert to conditional simple_returns, but couldn't for some
     reason, create a block to hold a simple_return insn and redirect
     those remaining edges.  */
  if (!unconverted_simple_returns.is_empty ())
    {
      basic_block simple_return_block_hot = NULL;
      basic_block simple_return_block_cold = NULL;
      edge pending_edge_hot = NULL;
      edge pending_edge_cold = NULL;
      basic_block exit_pred;
      int i;

      gcc_assert (entry_edge != orig_entry_edge);

      /* See if we can reuse the last insn that was emitted for the
	 epilogue.  */
      if (returnjump != NULL_RTX
	  && JUMP_LABEL (returnjump) == simple_return_rtx)
	{
	  e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
	  if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	    simple_return_block_hot = e->dest;
	  else
	    simple_return_block_cold = e->dest;
	}

      /* Also check returns we might need to add to tail blocks.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	if (EDGE_COUNT (e->src->preds) != 0
	    && (e->flags & EDGE_FAKE) != 0
	    && !bitmap_bit_p (&bb_flags, e->src->index))
	  {
	    if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	      pending_edge_hot = e;
	    else
	      pending_edge_cold = e;
	  }

      /* Save a pointer to the exit's predecessor BB for use in
	 inserting new BBs at the end of the function.  Do this
	 after the call to split_block above which may split
	 the original exit pred.  */
      exit_pred = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;

      FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
	{
	  basic_block *pdest_bb;
	  edge pending;

	  if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	    {
	      pdest_bb = &simple_return_block_hot;
	      pending = pending_edge_hot;
	    }
	  else
	    {
	      pdest_bb = &simple_return_block_cold;
	      pending = pending_edge_cold;
	    }

	  if (*pdest_bb == NULL && pending != NULL)
	    {
	      emit_return_into_block (true, pending->src);
	      pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
	      *pdest_bb = pending->src;
	    }
	  else if (*pdest_bb == NULL)
	    {
	      basic_block bb;
	      rtx start;

	      bb = create_basic_block (NULL, NULL, exit_pred);
	      BB_COPY_PARTITION (bb, e->src);
	      start = emit_jump_insn_after (gen_simple_return (),
					    BB_END (bb));
	      JUMP_LABEL (start) = simple_return_rtx;
	      emit_barrier_after (start);

	      *pdest_bb = bb;
	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	    }
	  redirect_edge_and_branch_force (e, *pdest_bb);
	}
      unconverted_simple_returns.release ();
    }

  if (entry_edge != orig_entry_edge)
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	if (EDGE_COUNT (e->src->preds) != 0
	    && (e->flags & EDGE_FAKE) != 0
	    && !bitmap_bit_p (&bb_flags, e->src->index))
	  {
	    emit_return_into_block (true, e->src);
	    e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
	  }
    }
#endif

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn)
#ifdef HAVE_simple_return
	  || (entry_edge != orig_entry_edge
	      && !bitmap_bit_p (&bb_flags, bb->index))
#endif
	  )
	{
	  ei_next (&ei);
	  continue;
	}

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
	{
	  start_sequence ();
	  emit_note (NOTE_INSN_EPILOGUE_BEG);
	  emit_insn (ep_seq);
	  seq = get_insns ();
	  end_sequence ();

	  /* Retain a map of the epilogue insns.  Used in life analysis to
	     avoid getting rid of sibcall epilogue insns.  Do this before we
	     actually emit the sequence.  */
	  record_insns (seq, NULL, &epilogue_insn_hash);
	  set_insn_locations (seq, epilogue_location);

	  emit_insn_before (seq, insn);
	}
      ei_next (&ei);
    }
#endif

#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif

#ifdef HAVE_simple_return
  bitmap_clear (&bb_flags);
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  rtx insn, first = NULL, note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue
	     insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
						 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
	*slot = type;
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	/* So this might be a type referenced by a global variable.
	   Record that type so that we can later decide to emit its
	   debug information.  */
	vec_safe_push (types_used_by_cur_var_decl, t);
    }
}

/* Helper to Hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash =
	  htab_create_ggc (37, types_used_by_vars_do_hash,
			   types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
				       hash_types_used_by_vars_entry (&e),
				       INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc_types_used_by_vars_entry ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_handle_check_leaf_regs (); }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     see PR57320.  */
  cleanup_cfg (0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}

namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_verify_flow, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish
    | TODO_verify_rtl_sharing ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return rest_of_handle_thread_prologue_and_epilogue ();
  }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */

static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      for (j = 0; j < noutputs; j++)
	if (output_matched[j] && input == SET_DEST (p_sets[j]))
	  break;
      if (j != noutputs)
	continue;

      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happen without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_match_asm_constraints (); }

}; // class pass_match_asm_constraints

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"