/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-pass.h"
#include "bb-reorder.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
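/* For example, with ALIGN == 8 (illustrative values):
     FLOOR_ROUND (13, 8) == 8     FLOOR_ROUND (-13, 8) == -16
     CEIL_ROUND (13, 8) == 16     CEIL_ROUND (-13, 8) == -8
   The mask form keeps the computation well defined for negative VALUEs,
   unlike division.  */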
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
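/* Illustration (assumed parameters, not from the sources): with a 32-bit
   Pmode and 4-byte words the check above caps the frame at
   2^31 - 64*4 bytes, so every local plus a small fixed area stays
   reachable through signed 32-bit offsets.  */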
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     type.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
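/* Worked example (hypothetical values): with STARTING_FRAME_OFFSET == 4
   and PREFERRED_STACK_BOUNDARY == 64, frame_alignment is 8, frame_off is
   4 and frame_phase is 4, so candidate offsets are chosen congruent to 4
   modulo 8; offset + STARTING_FRAME_OFFSET then lands on an 8-byte
   boundary as required.  */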
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }
  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }
  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);
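  /* E.g. (assumed values): a 2-byte HImode object carved out of an 8-byte
     allocation gets bigend_correction == 6 here, so on a big-endian
     target the returned address points at the low-order bytes.  */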
  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with KIND set to ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
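/* Typical use (illustrative only):

     rtx mem = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   allocates a word-sized slot aligned for SImode and returns it as a
   MEM rtx based on virtual_stack_vars_rtx (or on the frame pointer once
   virtual registers have been instantiated).  */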
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */
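/* For instance (an illustrative sketch), in `y = ({ T tmp = f (); g (tmp); })'
   the temporary carrying the grouping's value is retained by pretending it
   belongs to the enclosing nesting level, so popping the ({...}) level does
   not free it.  */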
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    htab_clear_slot (temp_slot_address_table, slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    htab_traverse (temp_slot_address_table,
                   remove_unused_temp_slot_addresses_1,
                   NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }
  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc_temp_slot ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }
  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
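/* Illustrative use (hypothetical call site):

     rtx scratch = assign_stack_temp (BLKmode, 16);

   yields a 16-byte BLKmode slot; it becomes reusable once
   free_temp_slots is called at the end of the enclosing statement.  */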
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
  int unsignedp;

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     see if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
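/* Example (hypothetical rtl): if a slot was recorded under
   (plus (reg virtual-stack-vars) (const_int 16)) and that address is
   later rewritten as (plus (reg frame-pointer) (const_int 24)), calling
   this function with the old and new forms records the new address as
   an alias of the same slot.  */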
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
                                               temp_slot_address_hash,
                                               temp_slot_address_eq,
                                               NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ??? This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_struct in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc_initial_value_struct ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc_vec_initial_value_pair (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}

/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}
static unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
        : REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
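/* For example (assumed configuration): without REG_PARM_STACK_SPACE, with
   ACCUMULATE_OUTGOING_ARGS in effect, 32 bytes of outgoing argument space
   and STACK_POINTER_OFFSET == 0, STACK_DYNAMIC_OFFSET expands to 32,
   placing dynamically allocated objects just above the outgoing args.  */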
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
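/* For instance (illustrative rtl; register names vary by target), an
   address such as

     (plus (reg virtual-stack-vars) (const_int 8))

   is rewritten during instantiation to

     (plus (reg frame-pointer) (const_int 8 + var_offset))

   using the offsets computed in instantiate_virtual_regs.  */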
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
        {
          *loc = plus_constant (GET_MODE (x), new_rtx, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
        {
          new_rtx = plus_constant (GET_MODE (x), new_rtx, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   gen_int_mode (-offset, GET_MODE (new_rtx)));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
                                   gen_int_mode (offset,
                                                 GET_MODE (SET_DEST (set))),
                                   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }
      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }
  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       gen_int_mode (offset, GET_MODE (x)),
                                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop
                (GET_MODE (new_rtx), PLUS, new_rtx,
                 gen_int_mode (offset, GET_MODE (new_rtx)),
                 NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }
      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          /* For asm goto, instead of fixing up all the edges
             just clear the template and clear input operands
             (asm goto doesn't have any output operands).  */
          if (JUMP_P (insn))
            {
              rtx asm_op = extract_asm_operands (PATTERN (insn));
              ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
              ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
              ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
            }
          else
            delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((TREE_CODE (t) == VAR_DECL
               || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;
        else if (DEBUG_INSN_P (insn))
          for_each_rtx (&INSN_VAR_LOCATION (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
        else
          instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return instantiate_virtual_regs (); }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          fntype = (fndecl
                    ? TREE_TYPE (fndecl)
                    : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
         types containing methods, otherwise the methods won't be callable
         from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
        return false;
      break;
    default:
      break;
    }

  return true;
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}
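/* Added illustration: variable-sized types (e.g. Ada records whose
   TYPE_SIZE is not an INTEGER_CST) always take the early return above and
   are passed by invisible reference.  Whether the resulting reference is
   then copied by the callee or by the caller is purely the target hook's
   decision; both functions only pre-filter TREE_ADDRESSABLE types.  */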
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}
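/* Added worked example: if the target's split_complex_arg hook accepts
   complex double, a parameter `_Complex double z' is rewritten in place
   into a DFmode PARM_DECL for the real part, and a second synthetic
   DFmode PARM_DECL for the imaginary part is inserted right after it.
   assign_parms_unsplit_complex below reassembles the two halves into a
   CONCAT once both have been assigned RTL.  */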
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
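/* Added example: for

     struct big f (int x);

   where aggregate_value_p is true and the target has no struct-value
   register, the augmented list becomes

     .result_ptr (struct big *), x (int)

   i.e. an artificial PARM_DECL carrying the return slot address is
   prepended before the user's first parameter.  */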
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
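/* Added illustration of the mode vocabulary above, for `void f (short s)'
   in C (exact modes are front-end and target dependent):

     nominal_mode  - TYPE_MODE of TREE_TYPE (parm): HImode
     passed_mode   - TYPE_MODE of DECL_ARG_TYPE (parm): SImode, since the
		     C front end records the promoted type there
     promoted_mode - whatever promote_function_mode reports for the ABI

   For a pass-by-reference parm all three collapse to the mode of the
   constructed pointer type and passed_pointer is set instead.  */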
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
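/* Added worked example for the rounding above (illustrative numbers):
   with STACK_BYTES == 16 and the first 8 bytes of an argument arriving
   in registers, partial == 8 and

     pretend_args_size = CEIL_ROUND (8, 16) = (8 + 16 - 1) & ~15 = 16

   so the prologue allocates a full 16-byte block in which the register
   half can be stored contiguously in front of the stack half.  */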
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
						  data->promoted_mode);
	      if (offset)
		set_mem_offset (stack_parm,
				MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (stack_parm), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  enum machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (data->entry_parm);

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allows such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx insn, insns, t = op1;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  insn = gen_extend_insn (op0, t, promoted_nominal_mode,
				  data->passed_mode, unsignedp);
	  emit_insn (insn);
	  insns = get_insns ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (PATTERN (insn), record_hard_reg_sets,
			     &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  end_sequence ();

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, DECL_RTL (parm));
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (parmreg, tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  enum machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  int offset = subreg_lowpart_offset (data->nominal_mode,
					      GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  SET_DECL_RTL (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      if (cfun->stdarg && !DECL_CHAIN (parm))
	assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}

      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
	assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
	assign_parm_setup_reg (&all, parm, &data);
      else
	assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  enum machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result. As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  crtl->args.arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
				   size_int (-all.stack_args_size.constant)),
		      NULL_RTX, VOIDmode, EXPAND_NORMAL));
#else
  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARMs.  Keep the parms address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
				       size_int (DECL_ALIGN (parm)));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
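/* Added sketch of the callee-copy transformation above, assuming a target
   whose callee_copies hook returns true for struct big:

     void f (struct big b) { ... }

   is gimplified roughly as if it were

     void f (struct big *parm)
     {
       struct big local = *parm;   // gimplify_assign (local, parm, ...)
       // the body reaches `b' through DECL_VALUE_EXPR -> local
     }

   with the local copy carved out by __builtin_alloca_with_align instead
   when DECL_SIZE_UNIT is not a compile-time constant.  */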
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					  initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
	&& (!tree_fits_uhwi_p (sizetree)
	    || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
      s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs || reg_parm_stack_space > 0)
    pad_to_arg_alignment (&locate->slot_offset, boundary,
			  &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
			   - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
				   size_binop (MINUS_EXPR,
					       ssize_int (0),
					       initial_offset_ptr->var),
				   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs || reg_parm_stack_space > 0)
    pad_to_arg_alignment (initial_offset_ptr, boundary,
			  &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!tree_fits_uhwi_p (sizetree)
	  || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
    sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
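/* Added worked example (upward-growing args, illustrative numbers): for
   an SImode argument with boundary == 64 while 12 bytes of arguments have
   been placed so far, pad_to_arg_alignment rounds the running offset from
   12 up to 16, slot_offset.constant becomes 16, and the PARM_BOUNDARY
   rounding can grow locate->size from 4 to a full 8-byte slot.  The
   numbers depend entirely on the target hooks involved.  */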
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after flow analysis and before register
   allocation, since register allocation will clobber the pseudo-regs
   to hard regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
          && DECL_RTL_SET_P (decl)
          && REG_P (DECL_RTL (decl))
          && regno_clobbered_at_setjmp (setjmp_crosses,
                                        REGNO (DECL_RTL (decl))))
        warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
                 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (setjmp_crosses,
                                      REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
               "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
               decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
          || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
              != prev_super))
        BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
          && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
        {
          BLOCK_FRAGMENT_CHAIN (block)
            = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
          if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
            BLOCK_SAME_RANGE (block) = 0;
        }
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  stack_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
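/* Subroutine of reorder_blocks.  Walk the insn chain INSNS and rebuild the
   BLOCK tree under CURRENT_BLOCK from the nesting of BLOCK_BEG/BLOCK_END
   notes; P_BLOCK_STACK tracks the blocks that are currently open.  */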
static void
reorder_blocks_1 (rtx insns, tree current_block, vec<tree> *p_block_stack)
{
  rtx insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
        {
          if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
            {
              tree block = NOTE_BLOCK (insn);
              tree origin;

              gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
              origin = block;

              if (prev_end)
                BLOCK_SAME_RANGE (prev_end) = 0;
              prev_end = NULL_TREE;

              /* If we have seen this block before, that means it now
                 spans multiple address regions.  Create a new fragment.  */
              if (TREE_ASM_WRITTEN (block))
                {
                  tree new_block = copy_node (block);

                  BLOCK_SAME_RANGE (new_block) = 0;
                  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
                  BLOCK_FRAGMENT_CHAIN (new_block)
                    = BLOCK_FRAGMENT_CHAIN (origin);
                  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

                  NOTE_BLOCK (insn) = new_block;
                  block = new_block;
                }

              if (prev_beg == current_block && prev_beg)
                BLOCK_SAME_RANGE (block) = 1;

              prev_beg = origin;

              BLOCK_SUBBLOCKS (block) = 0;
              TREE_ASM_WRITTEN (block) = 1;
              /* When there's only one block for the entire function,
                 current_block == block and we mustn't do this, it
                 will cause infinite recursion.  */
              if (block != current_block)
                {
                  tree super;
                  if (block != origin)
                    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
                                || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
                                                                (origin))
                                   == current_block);
                  if (p_block_stack->is_empty ())
                    super = current_block;
                  else
                    {
                      super = p_block_stack->last ();
                      gcc_assert (super == current_block
                                  || BLOCK_FRAGMENT_ORIGIN (super)
                                     == current_block);
                    }
                  BLOCK_SUPERCONTEXT (block) = super;
                  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
                  BLOCK_SUBBLOCKS (current_block) = block;
                  current_block = origin;
                }
              p_block_stack->safe_push (block);
            }
          else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
            {
              NOTE_BLOCK (insn) = p_block_stack->pop ();
              current_block = BLOCK_SUPERCONTEXT (current_block);
              if (BLOCK_FRAGMENT_ORIGIN (current_block))
                current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
              prev_beg = NULL_TREE;
              prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
                         ? NOTE_BLOCK (insn) : NULL_TREE;
            }
        }
      else
        {
          prev_beg = NULL_TREE;
          if (prev_end)
            BLOCK_SAME_RANGE (prev_end) = 0;
          prev_end = NULL_TREE;
        }
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
                   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
                   : optimization_default_node);

      if (!opts)
        opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
        {
          optimization_current_node = opts;
          cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
        }

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      if (opts != optimization_default_node)
        {
          init_tree_optimization_optabs (opts);
          if (TREE_OPTIMIZATION_OPTABS (opts))
            this_fn_optabs = (struct target_optabs *)
              TREE_OPTIMIZATION_OPTABS (opts);
        }
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function_p> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
              || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}
4457 struct function
*new_cfun
= cfun_stack
.pop ();
4458 /* When in_dummy_function, we do have a cfun but current_function_decl is
4459 NULL. We also allow pushing NULL cfun and subsequently changing
4460 current_function_decl to something else and have both restored by
4462 gcc_checking_assert (in_dummy_function
4464 || current_function_decl
== cfun
->decl
);
4465 set_cfun (new_cfun
);
4466 current_function_decl
= new_cfun
? new_cfun
->decl
: NULL_TREE
;
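/* push_cfun and pop_cfun are meant to be used in matched pairs, e.g.

       push_cfun (DECL_STRUCT_FUNCTION (fndecl));
       ...generate or inspect RTL for FNDECL...
       pop_cfun ();

   so that cfun and current_function_decl stay consistent with each
   other.  */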
/* Return the current funcdef number and increase it.  */

int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return the current funcdef number without changing it.  */

int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared_function ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          cfun->returns_pcc_struct = 1;
#endif
          cfun->returns_struct = 1;
        }

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
         but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
              || (!cfun && !current_function_decl)
              || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_alloc_cleared_stack_usage ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);
  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
        {
          emit_insn (tmp);
          break;
        }
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (sv)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, sv);
            }
        }
      if (value_address)
        {
          rtx x = value_address;
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
            {
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
              set_mem_attributes (x, DECL_RESULT (subr), 1);
            }
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
         into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
          && targetm.calls.return_in_msb (return_type))
        /* expand_function_end will insert the appropriate padding in
           this case.  Use the return value's natural (unpadded) mode
           within the function proper.  */
        SET_DECL_RTL (DECL_RESULT (subr),
                      gen_reg_rtx (TYPE_MODE (return_type)));
      else
        {
          /* In order to figure out what mode to use for the pseudo, we
             figure out what the mode of the eventual return register will
             actually be, and use that.  */
          rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

          /* Structures that are returned in registers are not
             aggregate_value_p, so we may see a PARALLEL or a REG.  */
          if (REG_P (hard_reg))
            SET_DECL_RTL (DECL_RESULT (subr),
                          gen_reg_rtx (GET_MODE (hard_reg)));
          else
            {
              gcc_assert (GET_CODE (hard_reg) == PARALLEL);
              SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
            }
        }

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
         result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain, insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
          && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
        set_dst_reg_note (insn, REG_EQUIV, chain, local);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
                       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                       cfun->nonlocal_goto_save_area,
                       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}
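/* Helper for diddle_return_value and clobber_return_register: emit a
   CLOBBER of REG.  */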
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        do_clobber_return_reg (decl_rtl, NULL);
    }
}
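/* Helper for diddle_return_value: emit a USE of REG, keeping the return
   value registers live up to the end of the function.  */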
static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = DECL_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
        && !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
/* Set the location of the insn chain starting at INSN to LOC.  */

static void
set_insn_locations (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
        INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (CALL_P (insn))
          {
            rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
            start_sequence ();
            if (STACK_CHECK_MOVING_SP)
              anti_adjust_stack_and_probe (max_frame_size, true);
            else
              probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
            seq = get_insns ();
            end_sequence ();
            set_insn_locations (seq, prologue_location);
            emit_insn_before (seq, stack_check_probe_note);
            break;
          }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
         the function context for sjlj exceptions.  */
      if (flag_exceptions)
        sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
         moved into the epilogue by scheduling, because we don't
         always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
        emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();
  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl = crtl->return_rtx;

          /* This should be set in assign_parms.  */
          gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

          /* If this is a BLKmode structure being returned in registers,
             then use the mode computed in expand_return.  Note that if
             decl_rtl is memory, then its mode may have been changed,
             but that crtl->return_rtx has not.  */
          if (GET_MODE (real_decl_rtl) == BLKmode)
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

          /* If a non-BLKmode return value should be padded at the least
             significant end of the register, shift it left by the appropriate
             amount.  BLKmode results are handled using the group load/store
             machinery.  */
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
              && REG_P (real_decl_rtl)
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
            {
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
                                           REGNO (real_decl_rtl)),
                              decl_rtl);
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
            }
          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             extension.  */
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
              promote_function_mode (TREE_TYPE (decl_result),
                                     GET_MODE (decl_rtl), &unsignedp,
                                     TREE_TYPE (current_function_decl), 1);

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
            {
              /* If expand_function_start has created a PARALLEL for decl_rtl,
                 move the result to the real return registers.  Otherwise, do
                 a group load from decl_rtl for a named return.  */
              if (GET_CODE (decl_rtl) == PARALLEL)
                emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
                                 TREE_TYPE (decl_result),
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
            }
          /* In the case of complex integer modes smaller than a word, we'll
             need to generate some non-trivial bitfield insertions.  Do that
             on a pseudo and not the hard register.  */
          else if (GET_CODE (decl_rtl) == CONCAT
                   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
                   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
            {
              int old_generating_concat_p;
              rtx tmp;

              old_generating_concat_p = generating_concat_p;
              generating_concat_p = 0;
              tmp = gen_reg_rtx (GET_MODE (decl_rtl));
              generating_concat_p = old_generating_concat_p;

              emit_move_insn (tmp, decl_rtl);
              emit_move_insn (real_decl_rtl, tmp);
            }
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
        }
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
        type = TREE_TYPE (type);
      else
        value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
                                               current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
                                              value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
         of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0, seq;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
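/* Return the stack slot in which the argument pointer is saved, allocating
   it on first use.  The first time the slot's contents are actually needed,
   also emit the insns that initialize it at function entry.  */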
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret),
                      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx insns, rtx end, htab_t *hashp)
{
  rtx tmp;
  htab_t hash = *hashp;

  if (hash == NULL)
    *hashp = hash
      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      void **slot = htab_find_slot (hash, tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  htab_t hash;
  void **slot;

  hash = epilogue_insn_hash;
  if (!hash || !htab_find (hash, insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !htab_find (hash, insn))
        return;
    }

  slot = htab_find_slot (hash, copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, htab_t hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int i;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
          return true;
      return false;
    }

  return htab_find (hash, insn) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
#ifdef HAVE_simple_return

/* Return true if INSN requires the stack frame to be set up.
   PROLOGUE_USED contains the hard registers used in the function
   prologue.  SET_UP_BY_PROLOGUE is the set of registers we expect the
   prologue to set up for the function.  */
bool
requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
                        HARD_REG_SET set_up_by_prologue)
{
  df_ref *df_rec;
  HARD_REG_SET hardregs;
  unsigned regno;

  if (CALL_P (insn))
    return !SIBLING_CALL_P (insn);

  /* We need a frame to get the unique CFA expected by the unwinder.  */
  if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
    return true;

  CLEAR_HARD_REG_SET (hardregs);
  for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
    {
      rtx dreg = DF_REF_REG (*df_rec);

      if (!REG_P (dreg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
                           REGNO (dreg));
    }
  if (hard_reg_set_intersect_p (hardregs, prologue_used))
    return true;
  AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (hardregs, regno)
        && df_regs_ever_live_p (regno))
      return true;

  for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
    {
      rtx reg = DF_REF_REG (*df_rec);

      if (!REG_P (reg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (reg),
                           REGNO (reg));
    }
  if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
    return true;

  return false;
}
/* See whether BB has a single successor that uses [REGNO, END_REGNO),
   and if BB is its only predecessor.  Return that block if so,
   otherwise return null.  */

static basic_block
next_block_for_reg (basic_block bb, int regno, int end_regno)
{
  edge e, live_edge;
  edge_iterator ei;
  bitmap live;
  int i;

  live_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      live = df_get_live_in (e->dest);
      for (i = regno; i < end_regno; i++)
        if (REGNO_REG_SET_P (live, i))
          {
            if (live_edge && live_edge != e)
              return NULL;
            live_edge = e;
          }
    }

  /* We can sometimes encounter dead code.  Don't try to move it
     into the exit block.  */
  if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Reject targets of abnormal edges.  This is needed for correctness
     on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
     exception edges even though it is generally treated as call-saved
     for the majority of the compilation.  Moving across abnormal edges
     isn't going to be interesting for shrink-wrap usage anyway.  */
  if (live_edge->flags & EDGE_ABNORMAL)
    return NULL;

  if (EDGE_COUNT (live_edge->dest->preds) > 1)
    return NULL;

  return live_edge->dest;
}
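/* A sketch of the case handled below (register names are only an
   example): the entry block ends in

       (set (reg:DI r13) (reg:DI di))	; copy incoming arg to call-saved reg

   and r13 is live along only one successor path.  Sinking the copy into
   that successor keeps the other paths free of any prologue
   requirement.  */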
/* Try to move INSN from BB to a successor.  Return true on success.
   USES and DEFS are the set of registers that are used and defined
   after INSN in BB.  */

static bool
move_insn_for_shrink_wrap (basic_block bb, rtx insn,
                           const HARD_REG_SET uses,
                           const HARD_REG_SET defs)
{
  rtx set, src, dest;
  bitmap live_out, live_in, bb_uses, bb_defs;
  unsigned int i, dregno, end_dregno, sregno, end_sregno;
  basic_block next_block;

  /* Look for a simple register copy.  */
  set = single_set (insn);
  if (!set)
    return false;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (!REG_P (dest) || !REG_P (src))
    return false;

  /* Make sure that the source register isn't defined later in BB.  */
  sregno = REGNO (src);
  end_sregno = END_REGNO (src);
  if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
    return false;

  /* Make sure that the destination register isn't referenced later in BB.  */
  dregno = REGNO (dest);
  end_dregno = END_REGNO (dest);
  if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
      || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
    return false;

  /* See whether there is a successor block to which we could move INSN.  */
  next_block = next_block_for_reg (bb, dregno, end_dregno);
  if (!next_block)
    return false;

  /* At this point we are committed to moving INSN, but let's try to
     move it as far as we can.  */
  do
    {
      live_out = df_get_live_out (bb);
      live_in = df_get_live_in (next_block);
      bb = next_block;

      /* Check whether BB uses DEST or clobbers DEST.  We need to add
         INSN to BB if so.  Either way, DEST is no longer live on entry,
         except for any part that overlaps SRC (next loop).  */
      bb_uses = &DF_LR_BB_INFO (bb)->use;
      bb_defs = &DF_LR_BB_INFO (bb)->def;
      if (df_live)
        {
          for (i = dregno; i < end_dregno; i++)
            {
              if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i)
                  || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
                next_block = NULL;
              CLEAR_REGNO_REG_SET (live_out, i);
              CLEAR_REGNO_REG_SET (live_in, i);
            }

          /* Check whether BB clobbers SRC.  We need to add INSN to BB if so.
             Either way, SRC is now live on entry.  */
          for (i = sregno; i < end_sregno; i++)
            {
              if (REGNO_REG_SET_P (bb_defs, i)
                  || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
                next_block = NULL;
              SET_REGNO_REG_SET (live_out, i);
              SET_REGNO_REG_SET (live_in, i);
            }
        }
      else
        {
          /* DF_LR_BB_INFO (bb)->def does not comprise the DF_REF_PARTIAL and
             DF_REF_CONDITIONAL defs.  So if DF_LIVE doesn't exist, i.e.
             at -O1, just give up searching NEXT_BLOCK.  */
          next_block = NULL;
          for (i = dregno; i < end_dregno; i++)
            {
              CLEAR_REGNO_REG_SET (live_out, i);
              CLEAR_REGNO_REG_SET (live_in, i);
            }

          for (i = sregno; i < end_sregno; i++)
            {
              SET_REGNO_REG_SET (live_out, i);
              SET_REGNO_REG_SET (live_in, i);
            }
        }

      /* If we don't need to add the move to BB, look for a single
         successor block.  */
      if (next_block)
        next_block = next_block_for_reg (next_block, dregno, end_dregno);
    }
  while (next_block);

  /* BB now defines DEST.  It only uses the parts of DEST that overlap SRC
     (next loop).  */
  for (i = dregno; i < end_dregno; i++)
    {
      CLEAR_REGNO_REG_SET (bb_uses, i);
      SET_REGNO_REG_SET (bb_defs, i);
    }

  /* BB now uses SRC.  */
  for (i = sregno; i < end_sregno; i++)
    SET_REGNO_REG_SET (bb_uses, i);

  emit_insn_after (PATTERN (insn), bb_note (bb));
  delete_insn (insn);
  return true;
}
/* Look for register copies in the first block of the function, and move
   them down into successor blocks if the register is used only on one
   path.  This exposes more opportunities for shrink-wrapping.  These
   kinds of sets often occur when incoming argument registers are moved
   to call-saved registers because their values are live across one or
   more calls during the function.  */

static void
prepare_shrink_wrap (basic_block entry_block)
{
  rtx insn, curr, x;
  HARD_REG_SET uses, defs;
  df_ref *ref;

  CLEAR_HARD_REG_SET (uses);
  CLEAR_HARD_REG_SET (defs);
  FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
    if (NONDEBUG_INSN_P (insn)
        && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
      {
        /* Add all defined registers to DEFs.  */
        for (ref = DF_INSN_DEFS (insn); *ref; ref++)
          {
            x = DF_REF_REG (*ref);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (defs, REGNO (x));
          }

        /* Add all used registers to USESs.  */
        for (ref = DF_INSN_USES (insn); *ref; ref++)
          {
            x = DF_REF_REG (*ref);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (uses, REGNO (x));
          }
      }
}
#endif
#if defined (HAVE_return) || defined (HAVE_simple_return)

/* Insert use of return register before the end of BB.  */

static void
emit_use_return_register_into_block (basic_block bb)
{
  rtx seq, insn;
  start_sequence ();
  use_return_register ();
  seq = get_insns ();
  end_sequence ();
  insn = BB_END (bb);
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    insn = prev_cc0_setter (insn);
#endif
  emit_insn_before (seq, insn);
}
/* Create a return pattern, either simple_return or return, depending on
   simple_p.  */

static rtx
gen_return_pattern (bool simple_p)
{
#ifdef HAVE_simple_return
  return simple_p ? gen_simple_return () : gen_return ();
#else
  gcc_assert (!simple_p);
  return gen_return ();
#endif
}
/* Insert an appropriate return pattern at the end of block BB.  This
   also means updating block_for_insn appropriately.  SIMPLE_P is
   the same as in gen_return_pattern and passed to it.  */

static void
emit_return_into_block (bool simple_p, basic_block bb)
{
  rtx jump, pat;
  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
  pat = PATTERN (jump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  gcc_assert (ANY_RETURN_P (pat));
  JUMP_LABEL (jump) = pat;
}
#endif
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
#ifdef HAVE_simple_return
/* Create a copy of BB instructions and insert at BEFORE.  Redirect
   preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE.  */
static void
dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
                        bitmap_head *need_prologue)
{
  edge_iterator ei;
  edge e;
  rtx insn = BB_END (bb);

  /* We know BB has a single successor, so there is no need to copy a
     simple jump at the end of BB.  */
  if (simplejump_p (insn))
    insn = PREV_INSN (insn);

  start_sequence ();
  duplicate_insn_chain (BB_HEAD (bb), insn);
  if (dump_file)
    {
      unsigned count = 0;
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          count++;
      fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
               bb->index, copy_bb->index, count);
    }
  insn = get_insns ();
  end_sequence ();
  emit_insn_before (insn, before);

  /* Redirect all the paths that need no prologue into copy_bb.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    if (!bitmap_bit_p (need_prologue, e->src->index))
      {
        int freq = EDGE_FREQUENCY (e);
        copy_bb->count += e->count;
        copy_bb->frequency += EDGE_FREQUENCY (e);
        e->dest->count -= e->count;
        if (e->dest->count < 0)
          e->dest->count = 0;
        e->dest->frequency -= freq;
        if (e->dest->frequency < 0)
          e->dest->frequency = 0;
        redirect_edge_and_branch_force (e, copy_bb);
        continue;
      }
    else
      ei_next (&ei);
}
#endif
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL.  */
static bool
active_insn_between (rtx head, rtx tail)
{
  while (tail)
    {
      if (active_insn_p (tail))
        return true;
      if (tail == head)
        return false;
      tail = PREV_INSN (tail);
    }
  return false;
}
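/* As an illustration of the conversion below: an unconditional

       (jump_insn (set (pc) (label_ref L)))

   targeting an exit block L that contains no active insns can be
   replaced outright by a (simple_)return insn, and a conditional jump
   to L can be redirected to (return) or (simple_return) when the
   target provides the corresponding pattern.  */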
/* LAST_BB is a block that exits, and empty of active instructions.
   Examine its predecessors for jumps that can be converted to
   (conditional) returns.  */
static vec<edge>
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
                          vec<edge> unconverted ATTRIBUTE_UNUSED)
{
  int i;
  basic_block bb;
  rtx label;
  edge_iterator ei;
  edge e;
  vec<basic_block> src_bbs;

  src_bbs.create (EDGE_COUNT (last_bb->preds));
  FOR_EACH_EDGE (e, ei, last_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
      src_bbs.quick_push (e->src);

  label = BB_HEAD (last_bb);

  FOR_EACH_VEC_ELT (src_bbs, i, bb)
    {
      rtx jump = BB_END (bb);

      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
        continue;

      e = find_edge (bb, last_bb);

      /* If we have an unconditional jump, we can replace that
         with a simple return instruction.  */
      if (simplejump_p (jump))
        {
          /* The use of the return register might be present in the exit
             fallthru block.  Either:
             - removing the use is safe, and we should remove the use in
             the exit fallthru block, or
             - removing the use is not safe, and we should add it here.
             For now, we conservatively choose the latter.  Either of the
             2 helps in crossjumping.  */
          emit_use_return_register_into_block (bb);

          emit_return_into_block (simple_p, bb);
          delete_insn (jump);
        }

      /* If we have a conditional jump branching to the last
         block, we can try to replace that with a conditional
         return instruction.  */
      else if (condjump_p (jump))
        {
          rtx dest;

          if (simple_p)
            dest = simple_return_rtx;
          else
            dest = ret_rtx;
          if (!redirect_jump (jump, dest, 0))
            {
#ifdef HAVE_simple_return
              if (simple_p)
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Failed to redirect bb %d branch.\n", bb->index);
                  unconverted.safe_push (e);
                }
#endif
              continue;
            }

          /* See comment in simplejump_p case above.  */
          emit_use_return_register_into_block (bb);

          /* If this block has only one successor, it both jumps
             and falls through to the fallthru block, so we can't
             delete the edge.  */
          if (single_succ_p (bb))
            continue;
        }
      else
        {
#ifdef HAVE_simple_return
          if (simple_p)
            {
              if (dump_file)
                fprintf (dump_file,
                         "Failed to redirect bb %d branch.\n", bb->index);
              unconverted.safe_push (e);
            }
#endif
          continue;
        }

      /* Fix up the CFG for the successful change we just made.  */
      redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
      e->flags &= ~EDGE_CROSSING;
    }
  src_bbs.release ();
  return unconverted;
}
/* Emit a return insn for the exit fallthru block.  */
static basic_block
emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
{
  basic_block last_bb = exit_fallthru_edge->src;

  if (JUMP_P (BB_END (last_bb)))
    {
      last_bb = split_edge (exit_fallthru_edge);
      exit_fallthru_edge = single_succ_edge (last_bb);
    }
  emit_barrier_after (BB_END (last_bb));
  emit_return_into_block (simple_p, last_bb);
  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  return last_bb;
}
#endif
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
5880 in a sibcall omit the sibcall_epilogue if the block is not in
5884 thread_prologue_and_epilogue_insns (void)
5887 #ifdef HAVE_simple_return
5888 vec
<edge
> unconverted_simple_returns
= vNULL
;
5889 bool nonempty_prologue
;
5890 bitmap_head bb_flags
;
5891 unsigned max_grow_size
;
5894 rtx seq ATTRIBUTE_UNUSED
, epilogue_end ATTRIBUTE_UNUSED
;
5895 rtx prologue_seq ATTRIBUTE_UNUSED
, split_prologue_seq ATTRIBUTE_UNUSED
;
5896 edge e
, entry_edge
, orig_entry_edge
, exit_fallthru_edge
;
5901 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5905 epilogue_end
= NULL_RTX
;
5906 returnjump
= NULL_RTX
;
5908 /* Can't deal with multiple successors of the entry block at the
5909 moment. Function should always have at least one entry
5911 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
5912 entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5913 orig_entry_edge
= entry_edge
;
5915 split_prologue_seq
= NULL_RTX
;
5916 if (flag_split_stack
5917 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun
->decl
))
5920 #ifndef HAVE_split_stack_prologue
5923 gcc_assert (HAVE_split_stack_prologue
);
5926 emit_insn (gen_split_stack_prologue ());
5927 split_prologue_seq
= get_insns ();
5930 record_insns (split_prologue_seq
, NULL
, &prologue_insn_hash
);
5931 set_insn_locations (split_prologue_seq
, prologue_location
);
5935 prologue_seq
= NULL_RTX
;
5936 #ifdef HAVE_prologue
5940 seq
= gen_prologue ();
5943 /* Insert an explicit USE for the frame pointer
5944 if the profiling is on and the frame pointer is required. */
5945 if (crtl
->profile
&& frame_pointer_needed
)
5946 emit_use (hard_frame_pointer_rtx
);
5948 /* Retain a map of the prologue insns. */
5949 record_insns (seq
, NULL
, &prologue_insn_hash
);
5950 emit_note (NOTE_INSN_PROLOGUE_END
);
5952 /* Ensure that instructions are not moved into the prologue when
5953 profiling is on. The call to the profiling routine can be
5954 emitted within the live range of a call-clobbered register. */
5955 if (!targetm
.profile_before_prologue () && crtl
->profile
)
5956 emit_insn (gen_blockage ());
5958 prologue_seq
= get_insns ();
5960 set_insn_locations (prologue_seq
, prologue_location
);
#ifdef HAVE_simple_return
  bitmap_initialize (&bb_flags, &bitmap_default_obstack);

  /* Try to perform a kind of shrink-wrapping, making sure the
     prologue/epilogue is emitted only around those parts of the
     function that require it.  */

  nonempty_prologue = false;
  for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
    if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
      {
        nonempty_prologue = true;
        break;
      }

  if (flag_shrink_wrap && HAVE_simple_return
      && (targetm.profile_before_prologue () || !crtl->profile)
      && nonempty_prologue && !crtl->calls_eh_return)
    {
      HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
      struct hard_reg_set_container set_up_by_prologue;
      rtx p_insn;
      vec<basic_block> vec;
      basic_block bb;
      bitmap_head bb_antic_flags;
      bitmap_head bb_on_list;
      bitmap_head bb_tail;

      if (dump_file)
        fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");

      /* Compute the registers set and used in the prologue.  */
      CLEAR_HARD_REG_SET (prologue_clobbered);
      CLEAR_HARD_REG_SET (prologue_used);
      for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
        {
          HARD_REG_SET this_used;
          if (!NONDEBUG_INSN_P (p_insn))
            continue;

          CLEAR_HARD_REG_SET (this_used);
          note_uses (&PATTERN (p_insn), record_hard_reg_uses,
                     &this_used);
          AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
          IOR_HARD_REG_SET (prologue_used, this_used);
          note_stores (PATTERN (p_insn), record_hard_reg_sets,
                       &prologue_clobbered);
        }

      prepare_shrink_wrap (entry_edge->dest);

      bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
      bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
      bitmap_initialize (&bb_tail, &bitmap_default_obstack);

      /* Find the set of basic blocks that require a stack frame,
         and blocks that are too big to be duplicated.  */

      vec.create (n_basic_blocks_for_fn (cfun));

      CLEAR_HARD_REG_SET (set_up_by_prologue.set);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                           STACK_POINTER_REGNUM);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
      if (frame_pointer_needed)
        add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                             HARD_FRAME_POINTER_REGNUM);
      if (pic_offset_table_rtx)
        add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                             PIC_OFFSET_TABLE_REGNUM);
      if (crtl->drap_reg)
        add_to_hard_reg_set (&set_up_by_prologue.set,
                             GET_MODE (crtl->drap_reg),
                             REGNO (crtl->drap_reg));
      if (targetm.set_up_by_prologue)
        targetm.set_up_by_prologue (&set_up_by_prologue);

      /* We don't use a different max size depending on
         optimize_bb_for_speed_p because increasing shrink-wrapping
         opportunities by duplicating tail blocks can actually result
         in an overall decrease in code size.  */
      max_grow_size = get_uncond_jump_length ();
      max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
      FOR_EACH_BB (bb)
        {
          rtx insn;
          unsigned size = 0;

          FOR_BB_INSNS (bb, insn)
            if (NONDEBUG_INSN_P (insn))
              {
                if (requires_stack_frame_p (insn, prologue_used,
                                            set_up_by_prologue.set))
                  {
                    if (bb == entry_edge->dest)
                      goto fail_shrinkwrap;
                    bitmap_set_bit (&bb_flags, bb->index);
                    vec.quick_push (bb);
                    break;
                  }
                else if (size <= max_grow_size)
                  {
                    size += get_attr_min_length (insn);
                    if (size > max_grow_size)
                      bitmap_set_bit (&bb_on_list, bb->index);
                  }
              }
        }
      /* Blocks that really need a prologue, or are too big for tails.  */
      bitmap_ior_into (&bb_on_list, &bb_flags);
      /* For every basic block that needs a prologue, mark all blocks
         reachable from it, so as to ensure they are also seen as
         requiring a prologue.  */
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();

          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
                && bitmap_set_bit (&bb_flags, e->dest->index))
              vec.quick_push (e->dest);
        }
      /* Find the set of basic blocks that need no prologue, have a
         single successor, can be duplicated, meet a max size
         requirement, and go to the exit via like blocks.  */
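
      /* (Illustrative sketch, not part of the original sources: a
         typical tail is a small shared exit block such as a common

           return x;

         block reached both from paths that built a frame and from
         paths that did not.  Duplicating it lets the frame-free paths
         reach the exit without ever executing the prologue.)  */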
      vec.quick_push (EXIT_BLOCK_PTR_FOR_FN (cfun));
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();

          FOR_EACH_EDGE (e, ei, tmp_bb->preds)
            if (single_succ_p (e->src)
                && !bitmap_bit_p (&bb_on_list, e->src->index)
                && can_duplicate_block_p (e->src))
              {
                edge pe;
                edge_iterator pei;

                /* If there is a predecessor of e->src which doesn't
                   need a prologue and the edge is complex,
                   we might not be able to redirect the branch
                   to a copy of e->src.  */
                FOR_EACH_EDGE (pe, pei, e->src->preds)
                  if ((pe->flags & EDGE_COMPLEX) != 0
                      && !bitmap_bit_p (&bb_flags, pe->src->index))
                    break;
                if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
                  vec.quick_push (e->src);
              }
        }
      /* Now walk backwards from every block that is marked as needing
         a prologue to compute the bb_antic_flags bitmap.  Exclude
         tail blocks; they can be duplicated to be used on paths not
         needing a prologue.  */
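
      /* (Illustrative note, not part of the original sources: this is
         the usual backward "anticipated" dataflow problem, roughly

           ANTIC (bb) = bb needs a prologue
                        || for all successors s of bb: ANTIC (s)

         computed with a worklist; the prologue can then be placed on
         the unique edge entering the ANTIC region.)  */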
      bitmap_clear (&bb_on_list);
      bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);

      FOR_EACH_BB (bb)
        {
          if (!bitmap_bit_p (&bb_antic_flags, bb->index))
            continue;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                && bitmap_set_bit (&bb_on_list, e->src->index))
              vec.quick_push (e->src);
        }
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();
          bool all_set = true;

          bitmap_clear_bit (&bb_on_list, tmp_bb->index);
          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
              {
                all_set = false;
                break;
              }

          if (all_set)
            {
              bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
              FOR_EACH_EDGE (e, ei, tmp_bb->preds)
                if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                    && bitmap_set_bit (&bb_on_list, e->src->index))
                  vec.quick_push (e->src);
            }
        }
      /* Find exactly one edge that leads to a block in ANTIC from
         a block that isn't.  */
      if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
        FOR_EACH_BB (bb)
          {
            if (!bitmap_bit_p (&bb_antic_flags, bb->index))
              continue;
            FOR_EACH_EDGE (e, ei, bb->preds)
              if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
                {
                  if (entry_edge != orig_entry_edge)
                    {
                      entry_edge = orig_entry_edge;
                      if (dump_file)
                        fprintf (dump_file, "More than one candidate edge.\n");
                      goto fail_shrinkwrap;
                    }
                  if (dump_file)
                    fprintf (dump_file, "Found candidate edge for "
                             "shrink-wrapping, %d->%d.\n", e->src->index,
                             e->dest->index);
                  entry_edge = e;
                }
          }
      if (entry_edge != orig_entry_edge)
        {
          /* Test whether the prologue is known to clobber any register
             (other than FP or SP) which is live on the edge.  */
          CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
          REG_SET_TO_HARD_REG_SET (live_on_edge,
                                   df_get_live_in (entry_edge->dest));
          if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
            {
              entry_edge = orig_entry_edge;
              if (dump_file)
                fprintf (dump_file,
                         "Shrink-wrapping aborted due to clobber.\n");
            }
        }
      if (entry_edge != orig_entry_edge)
        {
          crtl->shrink_wrapped = true;
          if (dump_file)
            fprintf (dump_file, "Performing shrink-wrapping.\n");
          /* Find tail blocks reachable from both blocks needing a
             prologue and blocks not needing a prologue.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB (bb)
              {
                bool some_pro, some_no_pro;
                if (!bitmap_bit_p (&bb_tail, bb->index))
                  continue;
                some_pro = some_no_pro = false;
                FOR_EACH_EDGE (e, ei, bb->preds)
                  {
                    if (bitmap_bit_p (&bb_flags, e->src->index))
                      some_pro = true;
                    else
                      some_no_pro = true;
                  }
                if (some_pro && some_no_pro)
                  vec.quick_push (bb);
                else
                  bitmap_clear_bit (&bb_tail, bb->index);
              }
          /* Find the head of each tail.  */
          while (!vec.is_empty ())
            {
              basic_block tbb = vec.pop ();

              if (!bitmap_bit_p (&bb_tail, tbb->index))
                continue;

              while (single_succ_p (tbb))
                {
                  tbb = single_succ (tbb);
                  bitmap_clear_bit (&bb_tail, tbb->index);
                }
            }
          /* Now duplicate the tails.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB_REVERSE (bb)
              {
                basic_block copy_bb, tbb;
                rtx insert_point;
                int eflags;

                if (!bitmap_clear_bit (&bb_tail, bb->index))
                  continue;

                /* Create a copy of BB, instructions and all, for
                   use on paths that don't need a prologue.
                   Ideal placement of the copy is on a fall-thru edge
                   or after a block that would jump to the copy.  */
                FOR_EACH_EDGE (e, ei, bb->preds)
                  if (!bitmap_bit_p (&bb_flags, e->src->index)
                      && single_succ_p (e->src))
                    break;
                if (e)
                  {
                    /* Make sure we insert after any barriers.  */
                    rtx end = get_last_bb_insn (e->src);
                    copy_bb = create_basic_block (NEXT_INSN (end),
                                                  NULL_RTX, e->src);
                    BB_COPY_PARTITION (copy_bb, e->src);
                  }
                else
                  {
                    /* Otherwise put the copy at the end of the function.  */
                    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
                                                  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
                    BB_COPY_PARTITION (copy_bb, bb);
                  }

                insert_point = emit_note_after (NOTE_INSN_DELETED,
                                                BB_END (copy_bb));
                emit_barrier_after (BB_END (copy_bb));

                tbb = bb;
                while (1)
                  {
                    dup_block_and_redirect (tbb, copy_bb, insert_point,
                                            &bb_flags);
                    tbb = single_succ (tbb);
                    if (tbb == EXIT_BLOCK_PTR_FOR_FN (cfun))
                      break;
                    e = split_block (copy_bb, PREV_INSN (insert_point));
                    copy_bb = e->dest;
                  }
                /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
                   We have yet to add a simple_return to the tails,
                   as we'd like to first convert_jumps_to_returns in
                   case the block is no longer used after that.  */
                eflags = EDGE_FAKE;
                if (CALL_P (PREV_INSN (insert_point))
                    && SIBLING_CALL_P (PREV_INSN (insert_point)))
                  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
                make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
                                       eflags);

                /* verify_flow_info doesn't like a note after a
                   sibling call.  */
                delete_insn (insert_point);
                if (bitmap_empty_p (&bb_tail))
                  break;
              }
        }

    fail_shrinkwrap:
      bitmap_clear (&bb_tail);
      bitmap_clear (&bb_antic_flags);
      bitmap_clear (&bb_on_list);
      vec.release ();
    }
#endif
  if (split_prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
      inserted = true;
    }
  if (prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (prologue_seq, entry_edge);
      inserted = true;
    }
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  /* If we're allowed to generate a simple return instruction, then by
     definition we don't need a full epilogue.  If the last basic
     block before the exit block does not contain active instructions,
     examine its predecessors and try to emit (conditional) return
     instructions.  */
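
  /* (Illustrative note, not part of the original sources: on targets
     whose return instruction can be predicated, a conditional jump to
     an empty block just before the exit, e.g. a "beq <label>" whose
     target contains no active insns, can be rewritten into a
     conditional return; this is what convert_jumps_to_returns
     attempts below.)  */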
#ifdef HAVE_simple_return
  if (entry_edge != orig_entry_edge)
    {
      if (optimize)
        {
          unsigned i, last;

          /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
             (but won't remove).  Stop at end of current preds.  */
          last = EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
          for (i = 0; i < last; i++)
            {
              e = EDGE_I (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds, i);
              if (LABEL_P (BB_HEAD (e->src))
                  && !bitmap_bit_p (&bb_flags, e->src->index)
                  && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
                unconverted_simple_returns
                  = convert_jumps_to_returns (e->src, true,
                                              unconverted_simple_returns);
            }
        }
      if (exit_fallthru_edge != NULL
          && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
          && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
        {
          basic_block last_bb;

          last_bb = emit_return_for_exit (exit_fallthru_edge, true);
          returnjump = BB_END (last_bb);
          exit_fallthru_edge = NULL;
        }
    }
#endif
#ifdef HAVE_return
  if (HAVE_return)
    {
      if (exit_fallthru_edge == NULL)
        goto epilogue_done;

      if (optimize)
        {
          basic_block last_bb = exit_fallthru_edge->src;

          if (LABEL_P (BB_HEAD (last_bb))
              && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
            convert_jumps_to_returns (last_bb, false, vNULL);

          if (EDGE_COUNT (last_bb->preds) != 0
              && single_succ_p (last_bb))
            {
              last_bb = emit_return_for_exit (exit_fallthru_edge, false);
              epilogue_end = returnjump = BB_END (last_bb);
#ifdef HAVE_simple_return
              /* Emitting the return may add a basic block.
                 Fix bb_flags for the added block.  */
              if (last_bb != exit_fallthru_edge->src)
                bitmap_set_bit (&bb_flags, last_bb->index);
#endif
              goto epilogue_done;
            }
        }
    }
#endif
  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;
#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
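
  /* (Illustrative note, not part of the original sources: eh_return
     patterns come from __builtin_eh_return, which is essentially only
     used by the stack unwinder in libgcc when it installs the context
     of a landing pad, so these edges are rare in ordinary code.)  */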
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
        continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
        continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
        continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif
  /* If nothing falls through into the exit block, we don't need an
     epilogue.  */
  if (exit_fallthru_edge == NULL)
    goto epilogue_done;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      if (seq)
        emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locations (seq, epilogue_location);

      seq = get_insns ();
      returnjump = get_last_insn ();
      end_sequence ();

      insert_insn_on_edge (seq, exit_fallthru_edge);
      inserted = true;

      if (JUMP_P (returnjump))
        set_return_jump_label (returnjump);
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
epilogue_done:
  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block);
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);
      /* The epilogue insns we inserted may cause the exit edge to no longer
         be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          if (((e->flags & EDGE_FALLTHRU) != 0)
              && returnjump_p (BB_END (e->src)))
            e->flags &= ~EDGE_FALLTHRU;
        }
    }
#ifdef HAVE_simple_return
  /* If there were branches to an empty LAST_BB which we tried to
     convert to conditional simple_returns, but couldn't for some
     reason, create a block to hold a simple_return insn and redirect
     those remaining edges.  */
  if (!unconverted_simple_returns.is_empty ())
    {
      basic_block simple_return_block_hot = NULL;
      basic_block simple_return_block_cold = NULL;
      edge pending_edge_hot = NULL;
      edge pending_edge_cold = NULL;
      basic_block exit_pred;
      int i;

      gcc_assert (entry_edge != orig_entry_edge);
      /* See if we can reuse the last insn that was emitted for the
         epilogue.  */
      if (returnjump != NULL_RTX
          && JUMP_LABEL (returnjump) == simple_return_rtx)
        {
          e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            simple_return_block_hot = e->dest;
          else
            simple_return_block_cold = e->dest;
        }
      /* Also check returns we might need to add to tail blocks.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
              pending_edge_hot = e;
            else
              pending_edge_cold = e;
          }
      /* Save a pointer to the exit's predecessor BB for use in
         inserting new BBs at the end of the function.  Do this
         after the call to split_block above which may split
         the original exit pred.  */
      exit_pred = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
        {
          basic_block *pdest_bb;
          edge pending;

          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            {
              pdest_bb = &simple_return_block_hot;
              pending = pending_edge_hot;
            }
          else
            {
              pdest_bb = &simple_return_block_cold;
              pending = pending_edge_cold;
            }

          if (*pdest_bb == NULL && pending != NULL)
            {
              emit_return_into_block (true, pending->src);
              pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
              *pdest_bb = pending->src;
            }
          else if (*pdest_bb == NULL)
            {
              basic_block bb;
              rtx start;

              bb = create_basic_block (NULL, NULL, exit_pred);
              BB_COPY_PARTITION (bb, e->src);
              start = emit_jump_insn_after (gen_simple_return (),
                                            BB_END (bb));
              JUMP_LABEL (start) = simple_return_rtx;
              emit_barrier_after (start);

              *pdest_bb = bb;
              make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
            }
          redirect_edge_and_branch_force (e, *pdest_bb);
        }
      unconverted_simple_returns.release ();
    }
  if (entry_edge != orig_entry_edge)
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            emit_return_into_block (true, e->src);
            e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
          }
    }
#endif
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn)
#ifdef HAVE_simple_return
          || (entry_edge != orig_entry_edge
              && !bitmap_bit_p (&bb_flags, bb->index))
#endif
          )
        {
          ei_next (&ei);
          continue;
        }

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before we
             actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locations (seq, epilogue_location);

          emit_insn_before (seq, insn);
        }
      ei_next (&ei);
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif

#ifdef HAVE_simple_return
  bitmap_clear (&bb_flags);
#endif
  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }
  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          rtx insn, first = NULL, note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of the function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}
/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}
/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        /* So this might be a type referenced by a global variable.
           Record that type so that we can later decide to emit its
           debug information.  */
        vec_safe_push (types_used_by_cur_var_decl, t);
    }
}
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}
/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash =
          htab_create_ggc (37, types_used_by_vars_do_hash,
                           types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
                                       hash_types_used_by_vars_entry (&e),
                                       INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc_types_used_by_vars_entry ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_handle_check_leaf_regs (); }

}; // class pass_leaf_regs
} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_verify_flow, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish
    | TODO_verify_rtl_sharing ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return rest_of_handle_thread_prologue_and_epilogue ();
  }

}; // class pass_thread_prologue_and_epilogue
} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
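
/* (Illustrative, self-contained example, not part of the original
   sources: user code of the shape described above would be e.g.

     int
     f (int inout)
     {
       asm ("" : "+mr" (inout));
       return inout;
     }

   where the "+mr" constraint is what expands into the matched
   "=mr"/"0" operand pair handled by this pass.)  */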
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change in once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);
      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_match_asm_constraints (); }

}; // class pass_match_asm_constraints

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"