1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
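/* For illustration only, a simplified sketch of the per-function flow as
   seen from a front end (the real call sites live in the individual front
   ends and in the expander, and take more care than shown here):

     expand_function_start (fndecl);   -- set up parms and the return slot
     ... expand the body, obtaining stack slots from assign_stack_local
         or assign_stack_temp_for_type as needed ...
     expand_function_end ();           -- emit the return sequence  */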
36 #include "coretypes.h"
38 #include "rtl-error.h"
47 #include "hard-reg-set.h"
48 #include "insn-config.h"
51 #include "basic-block.h"
55 #include "langhooks.h"
57 #include "common/common-target.h"
59 #include "tree-pass.h"
63 #include "bb-reorder.h"
65 /* So we can assign to cfun in this file. */
68 #ifndef STACK_ALIGNMENT_NEEDED
69 #define STACK_ALIGNMENT_NEEDED 1
70 #endif
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* Round a value down to the largest multiple of the required alignment
75    that does not exceed it. Avoid using division in case the value is
76    negative. Assume the alignment is a power of two. */
77 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
79 /* Similar, but round up to the next multiple of the required
80    alignment. */
81 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
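/* For illustration, with a power-of-two ALIGN these reduce to simple bit
   masking, e.g. FLOOR_ROUND (-13, 8) == -16, FLOOR_ROUND (16, 8) == 16 and
   CEIL_ROUND (13, 8) == 16.  */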
83 /* Nonzero once virtual register instantiation has been done.
84 assign_stack_local uses frame_pointer_rtx when this is nonzero.
85 calls.c:emit_library_call_value_1 uses it to set up
86 post-instantiation libcalls. */
87 int virtuals_instantiated;
89 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
90 static GTY(()) int funcdef_no;
92 /* These variables hold pointers to functions to create and destroy
93 target specific, per-function data structures. */
94 struct machine_function * (*init_machine_status) (void);
96 /* The currently compiled function. */
97 struct function *cfun = 0;
99 /* These hashes record the prologue and epilogue insns. */
100 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
101   htab_t prologue_insn_hash;
102 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
103   htab_t epilogue_insn_hash;
106 htab_t types_used_by_vars_hash = NULL;
107 vec<tree, va_gc> *types_used_by_cur_var_decl;
109 /* Forward declarations. */
111 static struct temp_slot *find_temp_slot_from_address (rtx);
112 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
113 static void pad_below (struct args_size *, enum machine_mode, tree);
114 static void reorder_blocks_1 (rtx, tree, vec<tree> *);
115 static int all_blocks (tree, tree *);
116 static tree *get_block_vector (tree, int *);
117 extern tree debug_find_var_in_block_tree (tree, tree);
118 /* We always define `record_insns' even if it's not used so that we
119    can always export `prologue_epilogue_contains'. */
120 static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
121 static bool contains (const_rtx, htab_t);
122 static void prepare_function_start (void);
123 static void do_clobber_return_reg (rtx, void *);
124 static void do_use_return_reg (rtx, void *);
126 /* Stack of nested functions. */
127 /* Keep track of the cfun stack. */
129 typedef struct function *function_p;
131 static vec<function_p> function_context_stack;
133 /* Save the current context for compilation of a nested function.
134 This is called from language-specific code. */
137 push_function_context (void)
140 allocate_struct_function (NULL, false);
142 function_context_stack.safe_push (cfun);
146 /* Restore the last saved context, at the end of a nested function.
147 This function is called from language-specific code. */
150 pop_function_context (void)
152 struct function *p = function_context_stack.pop ();
154 current_function_decl = p->decl;
156 /* Reset variables that have known state during rtx generation. */
157 virtuals_instantiated = 0;
158 generating_concat_p = 1;
161 /* Clear out all parts of the state in F that can safely be discarded
162 after the function has been parsed, but not compiled, to let
163 garbage collection reclaim the memory. */
166 free_after_parsing (struct function *f)
171 /* Clear out all parts of the state in F that can safely be discarded
172 after the function has been compiled, to let garbage collection
173 reclaim the memory. */
176 free_after_compilation (struct function *f)
178 prologue_insn_hash = NULL;
179 epilogue_insn_hash = NULL;
181 free (crtl->emit.regno_pointer_align);
183 memset (crtl, 0, sizeof (struct rtl_data));
188 regno_reg_rtx = NULL;
191 /* Return size needed for stack frame based on slots so far allocated.
192 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
193 the caller may have to do that. */
196 get_frame_size (void)
198 if (FRAME_GROWS_DOWNWARD)
199   return -frame_offset;
204 /* Issue an error message and return TRUE if frame OFFSET overflows in
205    the signed target pointer arithmetic for function FUNC. Otherwise
206    return FALSE. */
209 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
211 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
213 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
214     /* Leave room for the fixed part of the frame. */
215     - 64 * UNITS_PER_WORD)
217   error_at (DECL_SOURCE_LOCATION (func),
218             "total size of local objects too large");
225 /* Return stack slot alignment in bits for TYPE and MODE. */
228 get_stack_local_alignment (tree type, enum machine_mode mode)
230 unsigned int alignment;
233 alignment = BIGGEST_ALIGNMENT;
235 alignment = GET_MODE_ALIGNMENT (mode);
237 /* Allow the front-end to (possibly) increase the alignment of this
238    type. */
240 type = lang_hooks.types.type_for_mode (mode, 0);
242 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
245 /* Determine whether it is possible to fit a stack slot of size SIZE and
246 alignment ALIGNMENT into an area in the stack frame that starts at
247 frame offset START and has a length of LENGTH. If so, store the frame
248 offset to be used for the stack slot in *POFFSET and return true;
249 return false otherwise. This function will extend the frame size when
250 given a start/length pair that lies at the end of the frame. */
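/* A worked example, assuming FRAME_GROWS_DOWNWARD and a frame phase of
   zero: for a recorded free area with START == -32 and LENGTH == 16, a
   request of SIZE == 8 with ALIGNMENT == 8 computes
   FLOOR_ROUND (-32 + 16 - 8, 8) == -24, which lies within [-32, -16), so
   the slot fits and *POFFSET is set to -24.  */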
253 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
254                      HOST_WIDE_INT size, unsigned int alignment,
255                      HOST_WIDE_INT *poffset)
257 HOST_WIDE_INT this_frame_offset;
258 int frame_off, frame_alignment, frame_phase;
260 /* Calculate how many bytes the start of local variables is off from
261    stack alignment. */
262 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
263 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
264 frame_phase = frame_off ? frame_alignment - frame_off : 0;
266 /* Round the frame offset to the specified alignment. */
268 /* We must be careful here, since FRAME_OFFSET might be negative and
269 division with a negative dividend isn't as well defined as we might
270 like. So we instead assume that ALIGNMENT is a power of two and
271 use logical operations which are unambiguous. */
272 if (FRAME_GROWS_DOWNWARD
)
274 = (FLOOR_ROUND (start
+ length
- size
- frame_phase
,
275 (unsigned HOST_WIDE_INT
) alignment
)
279 = (CEIL_ROUND (start
- frame_phase
,
280 (unsigned HOST_WIDE_INT
) alignment
)
283 /* See if it fits. If this space is at the edge of the frame,
284 consider extending the frame to make it fit. Our caller relies on
285 this when allocating a new slot. */
286 if (frame_offset
== start
&& this_frame_offset
< frame_offset
)
287 frame_offset
= this_frame_offset
;
288 else if (this_frame_offset
< start
)
290 else if (start
+ length
== frame_offset
291 && this_frame_offset
+ size
> start
+ length
)
292 frame_offset
= this_frame_offset
+ size
;
293 else if (this_frame_offset
+ size
> start
+ length
)
296 *poffset
= this_frame_offset
;
300 /* Create a new frame_space structure describing free space in the stack
301 frame beginning at START and ending at END, and chain it into the
302 function's frame_space_list. */
305 add_frame_space (HOST_WIDE_INT start
, HOST_WIDE_INT end
)
307 struct frame_space
*space
= ggc_alloc_frame_space ();
308 space
->next
= crtl
->frame_space_list
;
309 crtl
->frame_space_list
= space
;
310 space
->start
= start
;
311 space
->length
= end
- start
;
314 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
315 with machine mode MODE.
317 ALIGN controls the amount of alignment for the address of the slot:
318 0 means according to MODE,
319 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
320 -2 means use BITS_PER_UNIT,
321 positive specifies alignment boundary in bits.
323 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
324 alignment and ASLK_RECORD_PAD bit set if we should remember
325 extra space we allocated for alignment purposes. When we are
326 called from assign_stack_temp_for_type, it is not set so we don't
327 track the same stack slot in two independent lists.
329 We do not round to stack_boundary here. */
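/* For illustration: before virtual register instantiation, a call such as
   assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0) hands back a MEM
   of roughly the shape

     (mem:DI (plus (reg virtual-stack-vars) (const_int OFFSET)))

   where OFFSET is the slot offset chosen below; once virtuals have been
   instantiated the address is expressed via frame_pointer_rtx instead.  */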
332 assign_stack_local_1 (enum machine_mode mode
, HOST_WIDE_INT size
,
336 int bigend_correction
= 0;
337 HOST_WIDE_INT slot_offset
= 0, old_frame_offset
;
338 unsigned int alignment
, alignment_in_bits
;
342 alignment
= get_stack_local_alignment (NULL
, mode
);
343 alignment
/= BITS_PER_UNIT
;
345 else if (align
== -1)
347 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
348 size
= CEIL_ROUND (size
, alignment
);
350 else if (align
== -2)
351 alignment
= 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
353 alignment
= align
/ BITS_PER_UNIT
;
355 alignment_in_bits
= alignment
* BITS_PER_UNIT
;
357 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
358 if (alignment_in_bits
> MAX_SUPPORTED_STACK_ALIGNMENT
)
360 alignment_in_bits
= MAX_SUPPORTED_STACK_ALIGNMENT
;
361 alignment
= alignment_in_bits
/ BITS_PER_UNIT
;
364 if (SUPPORTS_STACK_ALIGNMENT
)
366 if (crtl
->stack_alignment_estimated
< alignment_in_bits
)
368 if (!crtl
->stack_realign_processed
)
369 crtl
->stack_alignment_estimated
= alignment_in_bits
;
372 /* If stack is realigned and stack alignment value
373 hasn't been finalized, it is OK not to increase
374 stack_alignment_estimated. The bigger alignment
375 requirement is recorded in stack_alignment_needed
377 gcc_assert (!crtl
->stack_realign_finalized
);
378 if (!crtl
->stack_realign_needed
)
380 /* It is OK to reduce the alignment as long as the
381 requested size is 0 or the estimated stack
382 alignment >= mode alignment. */
383 gcc_assert ((kind
& ASLK_REDUCE_ALIGN
)
385 || (crtl
->stack_alignment_estimated
386 >= GET_MODE_ALIGNMENT (mode
)));
387 alignment_in_bits
= crtl
->stack_alignment_estimated
;
388 alignment
= alignment_in_bits
/ BITS_PER_UNIT
;
394 if (crtl
->stack_alignment_needed
< alignment_in_bits
)
395 crtl
->stack_alignment_needed
= alignment_in_bits
;
396 if (crtl
->max_used_stack_slot_alignment
< alignment_in_bits
)
397 crtl
->max_used_stack_slot_alignment
= alignment_in_bits
;
399 if (mode
!= BLKmode
|| size
!= 0)
401 if (kind
& ASLK_RECORD_PAD
)
403 struct frame_space
**psp
;
405 for (psp
= &crtl
->frame_space_list
; *psp
; psp
= &(*psp
)->next
)
407 struct frame_space
*space
= *psp
;
408 if (!try_fit_stack_local (space
->start
, space
->length
, size
,
409 alignment
, &slot_offset
))
412 if (slot_offset
> space
->start
)
413 add_frame_space (space
->start
, slot_offset
);
414 if (slot_offset
+ size
< space
->start
+ space
->length
)
415 add_frame_space (slot_offset
+ size
,
416 space
->start
+ space
->length
);
421 else if (!STACK_ALIGNMENT_NEEDED
)
423 slot_offset
= frame_offset
;
427 old_frame_offset
= frame_offset
;
429 if (FRAME_GROWS_DOWNWARD
)
431 frame_offset
-= size
;
432 try_fit_stack_local (frame_offset
, size
, size
, alignment
, &slot_offset
);
434 if (kind
& ASLK_RECORD_PAD
)
436 if (slot_offset
> frame_offset
)
437 add_frame_space (frame_offset
, slot_offset
);
438 if (slot_offset
+ size
< old_frame_offset
)
439 add_frame_space (slot_offset
+ size
, old_frame_offset
);
444 frame_offset
+= size
;
445 try_fit_stack_local (old_frame_offset
, size
, size
, alignment
, &slot_offset
);
447 if (kind
& ASLK_RECORD_PAD
)
449 if (slot_offset
> old_frame_offset
)
450 add_frame_space (old_frame_offset
, slot_offset
);
451 if (slot_offset
+ size
< frame_offset
)
452 add_frame_space (slot_offset
+ size
, frame_offset
);
457 /* On a big-endian machine, if we are allocating more space than we will use,
458 use the least significant bytes of those that are allocated. */
459 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
&& GET_MODE_SIZE (mode
) < size
)
460 bigend_correction
= size
- GET_MODE_SIZE (mode
);
462 /* If we have already instantiated virtual registers, return the actual
463 address relative to the frame pointer. */
464 if (virtuals_instantiated
)
465 addr
= plus_constant (Pmode
, frame_pointer_rtx
,
467 (slot_offset
+ bigend_correction
468 + STARTING_FRAME_OFFSET
, Pmode
));
470 addr
= plus_constant (Pmode
, virtual_stack_vars_rtx
,
472 (slot_offset
+ bigend_correction
,
475 x
= gen_rtx_MEM (mode
, addr
);
476 set_mem_align (x
, alignment_in_bits
);
477 MEM_NOTRAP_P (x
) = 1;
480 = gen_rtx_EXPR_LIST (VOIDmode
, x
, stack_slot_list
);
482 if (frame_offset_overflow (frame_offset
, current_function_decl
))
488 /* Wrap up assign_stack_local_1 with last parameter as false. */
491 assign_stack_local (enum machine_mode mode
, HOST_WIDE_INT size
, int align
)
493 return assign_stack_local_1 (mode
, size
, align
, ASLK_RECORD_PAD
);
496 /* In order to evaluate some expressions, such as function calls returning
497 structures in memory, we need to temporarily allocate stack locations.
498 We record each allocated temporary in the following structure.
500 Associated with each temporary slot is a nesting level. When we pop up
501 one level, all temporaries associated with the previous level are freed.
502 Normally, all temporaries are freed after the execution of the statement
503 in which they were created. However, if we are inside a ({...}) grouping,
504 the result may be in a temporary and hence must be preserved. If the
505 result could be in a temporary, we preserve it if we can determine which
506 one it is in. If we cannot determine which temporary may contain the
507 result, all temporaries are preserved. A temporary is preserved by
508 pretending it was allocated at the previous nesting level. */
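/* For example, in a GNU statement expression such as

     x = ({ struct S tmp = make_s (); frob (&tmp); tmp; });

   (make_s and frob being placeholder names), the temporary carrying the
   value of the last statement must survive until the enclosing assignment
   has copied it out, so it is preserved by moving it up one nesting
   level.  */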
510 struct GTY(()) temp_slot
{
511 /* Points to next temporary slot. */
512 struct temp_slot
*next
;
513 /* Points to previous temporary slot. */
514 struct temp_slot
*prev
;
515 /* The rtx to used to reference the slot. */
517 /* The size, in units, of the slot. */
519 /* The type of the object in the slot, or zero if it doesn't correspond
520 to a type. We use this to determine whether a slot can be reused.
521 It can be reused if objects of the type of the new slot will always
522 conflict with objects of the type of the old slot. */
524 /* The alignment (in bits) of the slot. */
526 /* Nonzero if this temporary is currently in use. */
528 /* Nesting level at which this slot is being used. */
530 /* The offset of the slot from the frame_pointer, including extra space
531 for alignment. This info is for combine_temp_slots. */
532 HOST_WIDE_INT base_offset
;
533 /* The size of the slot, including extra space for alignment. This
534 info is for combine_temp_slots. */
535 HOST_WIDE_INT full_size
;
538 /* A table of addresses that represent a stack slot. The table is a mapping
539 from address RTXen to a temp slot. */
540 static GTY((param_is(struct temp_slot_address_entry
))) htab_t temp_slot_address_table
;
541 static size_t n_temp_slots_in_use
;
543 /* Entry for the above hash table. */
544 struct GTY(()) temp_slot_address_entry
{
547 struct temp_slot
*temp_slot
;
550 /* Removes temporary slot TEMP from LIST. */
553 cut_slot_from_list (struct temp_slot
*temp
, struct temp_slot
**list
)
556 temp
->next
->prev
= temp
->prev
;
558 temp
->prev
->next
= temp
->next
;
562 temp
->prev
= temp
->next
= NULL
;
565 /* Inserts temporary slot TEMP to LIST. */
568 insert_slot_to_list (struct temp_slot
*temp
, struct temp_slot
**list
)
572 (*list
)->prev
= temp
;
577 /* Returns the list of used temp slots at LEVEL. */
579 static struct temp_slot
**
580 temp_slots_at_level (int level
)
582 if (level
>= (int) vec_safe_length (used_temp_slots
))
583 vec_safe_grow_cleared (used_temp_slots
, level
+ 1);
585 return &(*used_temp_slots
)[level
];
588 /* Returns the maximal temporary slot level. */
591 max_slot_level (void)
593 if (!used_temp_slots
)
596 return used_temp_slots
->length () - 1;
599 /* Moves temporary slot TEMP to LEVEL. */
602 move_slot_to_level (struct temp_slot
*temp
, int level
)
604 cut_slot_from_list (temp
, temp_slots_at_level (temp
->level
));
605 insert_slot_to_list (temp
, temp_slots_at_level (level
));
609 /* Make temporary slot TEMP available. */
612 make_slot_available (struct temp_slot
*temp
)
614 cut_slot_from_list (temp
, temp_slots_at_level (temp
->level
));
615 insert_slot_to_list (temp
, &avail_temp_slots
);
618 n_temp_slots_in_use
--;
621 /* Compute the hash value for an address -> temp slot mapping.
622 The value is cached on the mapping entry. */
624 temp_slot_address_compute_hash (struct temp_slot_address_entry
*t
)
626 int do_not_record
= 0;
627 return hash_rtx (t
->address
, GET_MODE (t
->address
),
628 &do_not_record
, NULL
, false);
631 /* Return the hash value for an address -> temp slot mapping. */
633 temp_slot_address_hash (const void *p
)
635 const struct temp_slot_address_entry
*t
;
636 t
= (const struct temp_slot_address_entry
*) p
;
640 /* Compare two address -> temp slot mapping entries. */
642 temp_slot_address_eq (const void *p1
, const void *p2
)
644 const struct temp_slot_address_entry
*t1
, *t2
;
645 t1
= (const struct temp_slot_address_entry
*) p1
;
646 t2
= (const struct temp_slot_address_entry
*) p2
;
647 return exp_equiv_p (t1
->address
, t2
->address
, 0, true);
650 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
652 insert_temp_slot_address (rtx address
, struct temp_slot
*temp_slot
)
655 struct temp_slot_address_entry
*t
= ggc_alloc_temp_slot_address_entry ();
656 t
->address
= address
;
657 t
->temp_slot
= temp_slot
;
658 t
->hash
= temp_slot_address_compute_hash (t
);
659 slot
= htab_find_slot_with_hash (temp_slot_address_table
, t
, t
->hash
, INSERT
);
663 /* Remove an address -> temp slot mapping entry if the temp slot is
664 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
666 remove_unused_temp_slot_addresses_1 (void **slot
, void *data ATTRIBUTE_UNUSED
)
668 const struct temp_slot_address_entry
*t
;
669 t
= (const struct temp_slot_address_entry
*) *slot
;
670 if (! t
->temp_slot
->in_use
)
671 htab_clear_slot (temp_slot_address_table
, slot
);
675 /* Remove all mappings of addresses to unused temp slots. */
677 remove_unused_temp_slot_addresses (void)
679 /* Use quicker clearing if there aren't any active temp slots. */
680 if (n_temp_slots_in_use
)
681 htab_traverse (temp_slot_address_table
,
682 remove_unused_temp_slot_addresses_1
,
685 htab_empty (temp_slot_address_table
);
688 /* Find the temp slot corresponding to the object at address X. */
690 static struct temp_slot
*
691 find_temp_slot_from_address (rtx x
)
694 struct temp_slot_address_entry tmp
, *t
;
696 /* First try the easy way:
697 See if X exists in the address -> temp slot mapping. */
699 tmp
.temp_slot
= NULL
;
700 tmp
.hash
= temp_slot_address_compute_hash (&tmp
);
701 t
= (struct temp_slot_address_entry
*)
702 htab_find_with_hash (temp_slot_address_table
, &tmp
, tmp
.hash
);
706 /* If we have a sum involving a register, see if it points to a temp
708 if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 0))
709 && (p
= find_temp_slot_from_address (XEXP (x
, 0))) != 0)
711 else if (GET_CODE (x
) == PLUS
&& REG_P (XEXP (x
, 1))
712 && (p
= find_temp_slot_from_address (XEXP (x
, 1))) != 0)
715 /* Last resort: Address is a virtual stack var address. */
716 if (GET_CODE (x
) == PLUS
717 && XEXP (x
, 0) == virtual_stack_vars_rtx
718 && CONST_INT_P (XEXP (x
, 1)))
721 for (i
= max_slot_level (); i
>= 0; i
--)
722 for (p
= *temp_slots_at_level (i
); p
; p
= p
->next
)
724 if (INTVAL (XEXP (x
, 1)) >= p
->base_offset
725 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
)
733 /* Allocate a temporary stack slot and record it for possible later
734    reuse.
736 MODE is the machine mode to be given to the returned rtx.
738 SIZE is the size in units of the space required. We do no rounding here
739 since assign_stack_local will do any required rounding.
741 TYPE is the type that will be used for the stack slot. */
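/* A typical (simplified) use, much as assign_temp below does for a value
   that must live in memory:

     rtx slot = assign_stack_temp_for_type (TYPE_MODE (type),
                                            int_size_in_bytes (type), type);

   The returned MEM carries the alias set and alignment derived from
   TYPE.  */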
744 assign_stack_temp_for_type (enum machine_mode mode
, HOST_WIDE_INT size
,
748 struct temp_slot
*p
, *best_p
= 0, *selected
= NULL
, **pp
;
751 /* If SIZE is -1 it means that somebody tried to allocate a temporary
752 of a variable size. */
753 gcc_assert (size
!= -1);
755 align
= get_stack_local_alignment (type
, mode
);
757 /* Try to find an available, already-allocated temporary of the proper
758 mode which meets the size and alignment requirements. Choose the
759 smallest one with the closest alignment.
761 If assign_stack_temp is called outside of the tree->rtl expansion,
762 we cannot reuse the stack slots (that may still refer to
763 VIRTUAL_STACK_VARS_REGNUM). */
764 if (!virtuals_instantiated
)
766 for (p
= avail_temp_slots
; p
; p
= p
->next
)
768 if (p
->align
>= align
&& p
->size
>= size
769 && GET_MODE (p
->slot
) == mode
770 && objects_must_conflict_p (p
->type
, type
)
771 && (best_p
== 0 || best_p
->size
> p
->size
772 || (best_p
->size
== p
->size
&& best_p
->align
> p
->align
)))
774 if (p
->align
== align
&& p
->size
== size
)
777 cut_slot_from_list (selected
, &avail_temp_slots
);
786 /* Make our best, if any, the one to use. */
790 cut_slot_from_list (selected
, &avail_temp_slots
);
792 /* If there are enough aligned bytes left over, make them into a new
793 temp_slot so that the extra bytes don't get wasted. Do this only
794 for BLKmode slots, so that we can be sure of the alignment. */
795 if (GET_MODE (best_p
->slot
) == BLKmode
)
797 int alignment
= best_p
->align
/ BITS_PER_UNIT
;
798 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
800 if (best_p
->size
- rounded_size
>= alignment
)
802 p
= ggc_alloc_temp_slot ();
804 p
->size
= best_p
->size
- rounded_size
;
805 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
806 p
->full_size
= best_p
->full_size
- rounded_size
;
807 p
->slot
= adjust_address_nv (best_p
->slot
, BLKmode
, rounded_size
);
808 p
->align
= best_p
->align
;
809 p
->type
= best_p
->type
;
810 insert_slot_to_list (p
, &avail_temp_slots
);
812 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
815 best_p
->size
= rounded_size
;
816 best_p
->full_size
= rounded_size
;
821 /* If we still didn't find one, make a new temporary. */
824 HOST_WIDE_INT frame_offset_old
= frame_offset
;
826 p
= ggc_alloc_temp_slot ();
828 /* We are passing an explicit alignment request to assign_stack_local.
829 One side effect of that is assign_stack_local will not round SIZE
830 to ensure the frame offset remains suitably aligned.
832 So for requests which depended on the rounding of SIZE, we go ahead
833 and round it now. We also make sure ALIGNMENT is at least
834 BIGGEST_ALIGNMENT. */
835 gcc_assert (mode
!= BLKmode
|| align
== BIGGEST_ALIGNMENT
);
836 p
->slot
= assign_stack_local_1 (mode
,
846 /* The following slot size computation is necessary because we don't
847 know the actual size of the temporary slot until assign_stack_local
848 has performed all the frame alignment and size rounding for the
849 requested temporary. Note that extra space added for alignment
850 can be either above or below this stack slot depending on which
851 way the frame grows. We include the extra space if and only if it
852 is above this slot. */
853 if (FRAME_GROWS_DOWNWARD
)
854 p
->size
= frame_offset_old
- frame_offset
;
858 /* Now define the fields used by combine_temp_slots. */
859 if (FRAME_GROWS_DOWNWARD
)
861 p
->base_offset
= frame_offset
;
862 p
->full_size
= frame_offset_old
- frame_offset
;
866 p
->base_offset
= frame_offset_old
;
867 p
->full_size
= frame_offset
- frame_offset_old
;
876 p
->level
= temp_slot_level
;
877 n_temp_slots_in_use
++;
879 pp
= temp_slots_at_level (p
->level
);
880 insert_slot_to_list (p
, pp
);
881 insert_temp_slot_address (XEXP (p
->slot
, 0), p
);
883 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
884 slot
= gen_rtx_MEM (mode
, XEXP (p
->slot
, 0));
885 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, slot
, stack_slot_list
);
887 /* If we know the alias set for the memory that will be used, use
888 it. If there's no TYPE, then we don't know anything about the
889 alias set for the memory. */
890 set_mem_alias_set (slot
, type
? get_alias_set (type
) : 0);
891 set_mem_align (slot
, align
);
893 /* If a type is specified, set the relevant flags. */
895 MEM_VOLATILE_P (slot
) = TYPE_VOLATILE (type
);
896 MEM_NOTRAP_P (slot
) = 1;
901 /* Allocate a temporary stack slot and record it for possible later
902 reuse. First two arguments are same as in preceding function. */
905 assign_stack_temp (enum machine_mode mode
, HOST_WIDE_INT size
)
907 return assign_stack_temp_for_type (mode
, size
, NULL_TREE
);
910 /* Assign a temporary.
911 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
912    and so it should be used in error messages. In either case, we
913    allocate a temporary of the given type.
914 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
915 it is 0 if a register is OK.
916 DONT_PROMOTE is 1 if we should not promote values in register
920 assign_temp (tree type_or_decl
, int memory_required
,
921 int dont_promote ATTRIBUTE_UNUSED
)
924 enum machine_mode mode
;
929 if (DECL_P (type_or_decl
))
930 decl
= type_or_decl
, type
= TREE_TYPE (decl
);
932 decl
= NULL
, type
= type_or_decl
;
934 mode
= TYPE_MODE (type
);
936 unsignedp
= TYPE_UNSIGNED (type
);
939 if (mode
== BLKmode
|| memory_required
)
941 HOST_WIDE_INT size
= int_size_in_bytes (type
);
944 /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid
945 problems with allocating the stack space. */
949 /* Unfortunately, we don't yet know how to allocate variable-sized
950 temporaries. However, sometimes we can find a fixed upper limit on
951 the size, so try that instead. */
953 size
= max_int_size_in_bytes (type
);
955 /* The size of the temporary may be too large to fit into an integer. */
956 /* ??? Not sure this should happen except for user silliness, so limit
957 this to things that aren't compiler-generated temporaries. The
958 rest of the time we'll die in assign_stack_temp_for_type. */
959 if (decl
&& size
== -1
960 && TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
)
962 error ("size of variable %q+D is too large", decl
);
966 tmp
= assign_stack_temp_for_type (mode
, size
, type
);
972 mode
= promote_mode (type
, mode
, &unsignedp
);
975 return gen_reg_rtx (mode
);
978 /* Combine temporary stack slots which are adjacent on the stack.
980 This allows for better use of already allocated stack space. This is only
981 done for BLKmode slots because we can be sure that we won't have alignment
982 problems in this case. */
985 combine_temp_slots (void)
987 struct temp_slot
*p
, *q
, *next
, *next_q
;
990 /* We can't combine slots, because the information about which slot
991 is in which alias set will be lost. */
992 if (flag_strict_aliasing
)
995 /* If there are a lot of temp slots, don't do anything unless
996 high levels of optimization. */
997 if (! flag_expensive_optimizations
)
998 for (p
= avail_temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
999 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
1002 for (p
= avail_temp_slots
; p
; p
= next
)
1008 if (GET_MODE (p
->slot
) != BLKmode
)
1011 for (q
= p
->next
; q
; q
= next_q
)
1017 if (GET_MODE (q
->slot
) != BLKmode
)
1020 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
1022 /* Q comes after P; combine Q into P. */
1024 p
->full_size
+= q
->full_size
;
1027 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
1029 /* P comes after Q; combine P into Q. */
1031 q
->full_size
+= p
->full_size
;
1036 cut_slot_from_list (q
, &avail_temp_slots
);
1039 /* Either delete P or advance past it. */
1041 cut_slot_from_list (p
, &avail_temp_slots
);
1045 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1046 slot that previously was known by OLD_RTX. */
1049 update_temp_slot_address (rtx old_rtx
, rtx new_rtx
)
1051 struct temp_slot
*p
;
1053 if (rtx_equal_p (old_rtx
, new_rtx
))
1056 p
= find_temp_slot_from_address (old_rtx
);
1058 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1059    NEW_RTX is a register, see if one operand of the PLUS is a
1060    temporary location; if so, NEW_RTX points into it. Otherwise,
1061    if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1062    in common between them; if so, try a recursive call on those
1063    values. */
1066 if (GET_CODE (old_rtx
) != PLUS
)
1069 if (REG_P (new_rtx
))
1071 update_temp_slot_address (XEXP (old_rtx
, 0), new_rtx
);
1072 update_temp_slot_address (XEXP (old_rtx
, 1), new_rtx
);
1075 else if (GET_CODE (new_rtx
) != PLUS
)
1078 if (rtx_equal_p (XEXP (old_rtx
, 0), XEXP (new_rtx
, 0)))
1079 update_temp_slot_address (XEXP (old_rtx
, 1), XEXP (new_rtx
, 1));
1080 else if (rtx_equal_p (XEXP (old_rtx
, 1), XEXP (new_rtx
, 0)))
1081 update_temp_slot_address (XEXP (old_rtx
, 0), XEXP (new_rtx
, 1));
1082 else if (rtx_equal_p (XEXP (old_rtx
, 0), XEXP (new_rtx
, 1)))
1083 update_temp_slot_address (XEXP (old_rtx
, 1), XEXP (new_rtx
, 0));
1084 else if (rtx_equal_p (XEXP (old_rtx
, 1), XEXP (new_rtx
, 1)))
1085 update_temp_slot_address (XEXP (old_rtx
, 0), XEXP (new_rtx
, 0));
1090 /* Otherwise add an alias for the temp's address. */
1091 insert_temp_slot_address (new_rtx
, p
);
1094 /* If X could be a reference to a temporary slot, mark that slot as
1095 belonging to the to one level higher than the current level. If X
1096 matched one of our slots, just mark that one. Otherwise, we can't
1097 easily predict which it is, so upgrade all of them.
1099 This is called when an ({...}) construct occurs and a statement
1100 returns a value in memory. */
1103 preserve_temp_slots (rtx x
)
1105 struct temp_slot
*p
= 0, *next
;
1110 /* If X is a register that is being used as a pointer, see if we have
1111 a temporary slot we know it points to. */
1112 if (REG_P (x
) && REG_POINTER (x
))
1113 p
= find_temp_slot_from_address (x
);
1115 /* If X is not in memory or is at a constant address, it cannot be in
1116 a temporary slot. */
1117 if (p
== 0 && (!MEM_P (x
) || CONSTANT_P (XEXP (x
, 0))))
1120 /* First see if we can find a match. */
1122 p
= find_temp_slot_from_address (XEXP (x
, 0));
1126 if (p
->level
== temp_slot_level
)
1127 move_slot_to_level (p
, temp_slot_level
- 1);
1131 /* Otherwise, preserve all non-kept slots at this level. */
1132 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1135 move_slot_to_level (p
, temp_slot_level
- 1);
1139 /* Free all temporaries used so far. This is normally called at the
1140 end of generating code for a statement. */
1143 free_temp_slots (void)
1145 struct temp_slot
*p
, *next
;
1146 bool some_available
= false;
1148 for (p
= *temp_slots_at_level (temp_slot_level
); p
; p
= next
)
1151 make_slot_available (p
);
1152 some_available
= true;
1157 remove_unused_temp_slot_addresses ();
1158 combine_temp_slots ();
1162 /* Push deeper into the nesting level for stack temporaries. */
1165 push_temp_slots (void)
1170 /* Pop a temporary nesting level. All slots in use in the current level
1174 pop_temp_slots (void)
1180 /* Initialize temporary slots. */
1183 init_temp_slots (void)
1185 /* We have not allocated any temporaries yet. */
1186 avail_temp_slots
= 0;
1187 vec_alloc (used_temp_slots
, 0);
1188 temp_slot_level
= 0;
1189 n_temp_slots_in_use
= 0;
1191 /* Set up the table to map addresses to temp slots. */
1192 if (! temp_slot_address_table
)
1193 temp_slot_address_table
= htab_create_ggc (32,
1194 temp_slot_address_hash
,
1195 temp_slot_address_eq
,
1198 htab_empty (temp_slot_address_table
);
1201 /* Functions and data structures to keep track of the values hard regs
1202 had at the start of the function. */
1204 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1205    and has_hard_reg_initial_val. */
1206 typedef struct GTY(()) initial_value_pair
{
1209 } initial_value_pair
;
1210 /* ??? This could be a VEC but there is currently no way to define an
1211 opaque VEC type. This could be worked around by defining struct
1212 initial_value_pair in function.h. */
1213 typedef struct GTY(()) initial_value_struct
{
1216 initial_value_pair
* GTY ((length ("%h.num_entries"))) entries
;
1217 } initial_value_struct
;
1219 /* If a pseudo represents an initial hard reg (or expression), return
1220 it, else return NULL_RTX. */
1223 get_hard_reg_initial_reg (rtx reg
)
1225 struct initial_value_struct
*ivs
= crtl
->hard_reg_initial_vals
;
1231 for (i
= 0; i
< ivs
->num_entries
; i
++)
1232 if (rtx_equal_p (ivs
->entries
[i
].pseudo
, reg
))
1233 return ivs
->entries
[i
].hard_reg
;
1238 /* Make sure that there's a pseudo register of mode MODE that stores the
1239 initial value of hard register REGNO. Return an rtx for such a pseudo. */
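/* For illustration, a back end that needs the value a register had on
   entry (say its link register) can do

     rtx lr = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   where LR_REGNUM stands for whatever the target calls that register; the
   move from the hard register into the pseudo is emitted later by
   emit_initial_value_sets.  */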
1242 get_hard_reg_initial_val (enum machine_mode mode
, unsigned int regno
)
1244 struct initial_value_struct
*ivs
;
1247 rv
= has_hard_reg_initial_val (mode
, regno
);
1251 ivs
= crtl
->hard_reg_initial_vals
;
1254 ivs
= ggc_alloc_initial_value_struct ();
1255 ivs
->num_entries
= 0;
1256 ivs
->max_entries
= 5;
1257 ivs
->entries
= ggc_alloc_vec_initial_value_pair (5);
1258 crtl
->hard_reg_initial_vals
= ivs
;
1261 if (ivs
->num_entries
>= ivs
->max_entries
)
1263 ivs
->max_entries
+= 5;
1264 ivs
->entries
= GGC_RESIZEVEC (initial_value_pair
, ivs
->entries
,
1268 ivs
->entries
[ivs
->num_entries
].hard_reg
= gen_rtx_REG (mode
, regno
);
1269 ivs
->entries
[ivs
->num_entries
].pseudo
= gen_reg_rtx (mode
);
1271 return ivs
->entries
[ivs
->num_entries
++].pseudo
;
1274 /* See if get_hard_reg_initial_val has been used to create a pseudo
1275 for the initial value of hard register REGNO in mode MODE. Return
1276 the associated pseudo if so, otherwise return NULL. */
1279 has_hard_reg_initial_val (enum machine_mode mode
, unsigned int regno
)
1281 struct initial_value_struct
*ivs
;
1284 ivs
= crtl
->hard_reg_initial_vals
;
1286 for (i
= 0; i
< ivs
->num_entries
; i
++)
1287 if (GET_MODE (ivs
->entries
[i
].hard_reg
) == mode
1288 && REGNO (ivs
->entries
[i
].hard_reg
) == regno
)
1289 return ivs
->entries
[i
].pseudo
;
1295 emit_initial_value_sets (void)
1297 struct initial_value_struct
*ivs
= crtl
->hard_reg_initial_vals
;
1305 for (i
= 0; i
< ivs
->num_entries
; i
++)
1306 emit_move_insn (ivs
->entries
[i
].pseudo
, ivs
->entries
[i
].hard_reg
);
1310 emit_insn_at_entry (seq
);
1314 /* Return the hardreg-pseudoreg initial values pair entry I and
1315 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1317 initial_value_entry (int i
, rtx
*hreg
, rtx
*preg
)
1319 struct initial_value_struct
*ivs
= crtl
->hard_reg_initial_vals
;
1320 if (!ivs
|| i
>= ivs
->num_entries
)
1323 *hreg
= ivs
->entries
[i
].hard_reg
;
1324 *preg
= ivs
->entries
[i
].pseudo
;
1328 /* These routines are responsible for converting virtual register references
1329 to the actual hard register references once RTL generation is complete.
1331 The following four variables are used for communication between the
1332 routines. They contain the offsets of the virtual registers from their
1333 respective hard registers. */
1335 static int in_arg_offset
;
1336 static int var_offset
;
1337 static int dynamic_offset
;
1338 static int out_arg_offset
;
1339 static int cfa_offset
;
1341 /* In most machines, the stack pointer register is equivalent to the bottom
1344 #ifndef STACK_POINTER_OFFSET
1345 #define STACK_POINTER_OFFSET 0
1348 /* If not defined, pick an appropriate default for the offset of dynamically
1349 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1350 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1352 #ifndef STACK_DYNAMIC_OFFSET
1354 /* The bottom of the stack points to the actual arguments. If
1355 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1356    parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1357 stack space for register parameters is not pushed by the caller, but
1358 rather part of the fixed stack areas and hence not included in
1359 `crtl->outgoing_args_size'. Nevertheless, we must allow
1360 for it when allocating stack dynamic objects. */
1362 #if defined(REG_PARM_STACK_SPACE)
1363 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1364 ((ACCUMULATE_OUTGOING_ARGS \
1365 ? (crtl->outgoing_args_size \
1366 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1367 : REG_PARM_STACK_SPACE (FNDECL))) \
1368 : 0) + (STACK_POINTER_OFFSET))
1370 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1371 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1372 + (STACK_POINTER_OFFSET))
1377 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1378 is a virtual register, return the equivalent hard register and set the
1379 offset indirectly through the pointer. Otherwise, return 0. */
1382 instantiate_new_reg (rtx x
, HOST_WIDE_INT
*poffset
)
1385 HOST_WIDE_INT offset
;
1387 if (x
== virtual_incoming_args_rtx
)
1389 if (stack_realign_drap
)
1391 /* Replace virtual_incoming_args_rtx with internal arg
1392 pointer if DRAP is used to realign stack. */
1393 new_rtx
= crtl
->args
.internal_arg_pointer
;
1397 new_rtx
= arg_pointer_rtx
, offset
= in_arg_offset
;
1399 else if (x
== virtual_stack_vars_rtx
)
1400 new_rtx
= frame_pointer_rtx
, offset
= var_offset
;
1401 else if (x
== virtual_stack_dynamic_rtx
)
1402 new_rtx
= stack_pointer_rtx
, offset
= dynamic_offset
;
1403 else if (x
== virtual_outgoing_args_rtx
)
1404 new_rtx
= stack_pointer_rtx
, offset
= out_arg_offset
;
1405 else if (x
== virtual_cfa_rtx
)
1407 #ifdef FRAME_POINTER_CFA_OFFSET
1408 new_rtx
= frame_pointer_rtx
;
1410 new_rtx
= arg_pointer_rtx
;
1412 offset
= cfa_offset
;
1414 else if (x
== virtual_preferred_stack_boundary_rtx
)
1416 new_rtx
= GEN_INT (crtl
->preferred_stack_boundary
/ BITS_PER_UNIT
);
1426 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1427 Instantiate any virtual registers present inside of *LOC. The expression
1428 is simplified, as much as possible, but is not to be considered "valid"
1429 in any sense implied by the target. If any change is made, set CHANGED
1433 instantiate_virtual_regs_in_rtx (rtx
*loc
, void *data
)
1435 HOST_WIDE_INT offset
;
1436 bool *changed
= (bool *) data
;
1443 switch (GET_CODE (x
))
1446 new_rtx
= instantiate_new_reg (x
, &offset
);
1449 *loc
= plus_constant (GET_MODE (x
), new_rtx
, offset
);
1456 new_rtx
= instantiate_new_reg (XEXP (x
, 0), &offset
);
1459 new_rtx
= plus_constant (GET_MODE (x
), new_rtx
, offset
);
1460 *loc
= simplify_gen_binary (PLUS
, GET_MODE (x
), new_rtx
, XEXP (x
, 1));
1466 /* FIXME -- from old code */
1467 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1468 we can commute the PLUS and SUBREG because pointers into the
1469 frame are well-behaved. */
1479 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1480 matches the predicate for insn CODE operand OPERAND. */
1483 safe_insn_predicate (int code
, int operand
, rtx x
)
1485 return code
< 0 || insn_operand_matches ((enum insn_code
) code
, operand
, x
);
1488 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1489 registers present inside of insn. The result will be a valid insn. */
1492 instantiate_virtual_regs_in_insn (rtx insn
)
1494 HOST_WIDE_INT offset
;
1496 bool any_change
= false;
1497 rtx set
, new_rtx
, x
, seq
;
1499 /* There are some special cases to be handled first. */
1500 set
= single_set (insn
);
1503 /* We're allowed to assign to a virtual register. This is interpreted
1504 to mean that the underlying register gets assigned the inverse
1505 transformation. This is used, for example, in the handling of
1507 new_rtx
= instantiate_new_reg (SET_DEST (set
), &offset
);
1512 for_each_rtx (&SET_SRC (set
), instantiate_virtual_regs_in_rtx
, NULL
);
1513 x
= simplify_gen_binary (PLUS
, GET_MODE (new_rtx
), SET_SRC (set
),
1514 gen_int_mode (-offset
, GET_MODE (new_rtx
)));
1515 x
= force_operand (x
, new_rtx
);
1517 emit_move_insn (new_rtx
, x
);
1522 emit_insn_before (seq
, insn
);
1527 /* Handle a straight copy from a virtual register by generating a
1528 new add insn. The difference between this and falling through
1529 to the generic case is avoiding a new pseudo and eliminating a
1530 move insn in the initial rtl stream. */
1531 new_rtx
= instantiate_new_reg (SET_SRC (set
), &offset
);
1532 if (new_rtx
&& offset
!= 0
1533 && REG_P (SET_DEST (set
))
1534 && REGNO (SET_DEST (set
)) > LAST_VIRTUAL_REGISTER
)
1538 x
= expand_simple_binop (GET_MODE (SET_DEST (set
)), PLUS
, new_rtx
,
1539 gen_int_mode (offset
,
1540 GET_MODE (SET_DEST (set
))),
1541 SET_DEST (set
), 1, OPTAB_LIB_WIDEN
);
1542 if (x
!= SET_DEST (set
))
1543 emit_move_insn (SET_DEST (set
), x
);
1548 emit_insn_before (seq
, insn
);
1553 extract_insn (insn
);
1554 insn_code
= INSN_CODE (insn
);
1556 /* Handle a plus involving a virtual register by determining if the
1557 operands remain valid if they're modified in place. */
1558 if (GET_CODE (SET_SRC (set
)) == PLUS
1559 && recog_data
.n_operands
>= 3
1560 && recog_data
.operand_loc
[1] == &XEXP (SET_SRC (set
), 0)
1561 && recog_data
.operand_loc
[2] == &XEXP (SET_SRC (set
), 1)
1562 && CONST_INT_P (recog_data
.operand
[2])
1563 && (new_rtx
= instantiate_new_reg (recog_data
.operand
[1], &offset
)))
1565 offset
+= INTVAL (recog_data
.operand
[2]);
1567 /* If the sum is zero, then replace with a plain move. */
1569 && REG_P (SET_DEST (set
))
1570 && REGNO (SET_DEST (set
)) > LAST_VIRTUAL_REGISTER
)
1573 emit_move_insn (SET_DEST (set
), new_rtx
);
1577 emit_insn_before (seq
, insn
);
1582 x
= gen_int_mode (offset
, recog_data
.operand_mode
[2]);
1584 /* Using validate_change and apply_change_group here leaves
1585 recog_data in an invalid state. Since we know exactly what
1586 we want to check, do those two by hand. */
1587 if (safe_insn_predicate (insn_code
, 1, new_rtx
)
1588 && safe_insn_predicate (insn_code
, 2, x
))
1590 *recog_data
.operand_loc
[1] = recog_data
.operand
[1] = new_rtx
;
1591 *recog_data
.operand_loc
[2] = recog_data
.operand
[2] = x
;
1594 /* Fall through into the regular operand fixup loop in
1595 order to take care of operands other than 1 and 2. */
1601 extract_insn (insn
);
1602 insn_code
= INSN_CODE (insn
);
1605 /* In the general case, we expect virtual registers to appear only in
1606 operands, and then only as either bare registers or inside memories. */
1607 for (i
= 0; i
< recog_data
.n_operands
; ++i
)
1609 x
= recog_data
.operand
[i
];
1610 switch (GET_CODE (x
))
1614 rtx addr
= XEXP (x
, 0);
1615 bool changed
= false;
1617 for_each_rtx (&addr
, instantiate_virtual_regs_in_rtx
, &changed
);
1622 x
= replace_equiv_address (x
, addr
);
1623 /* It may happen that the address with the virtual reg
1624 was valid (e.g. based on the virtual stack reg, which might
1625 be acceptable to the predicates with all offsets), whereas
1626 the address now isn't anymore, for instance when the address
1627 is still offsetted, but the base reg isn't virtual-stack-reg
1628 anymore. Below we would do a force_reg on the whole operand,
1629 but this insn might actually only accept memory. Hence,
1630 before doing that last resort, try to reload the address into
1631 a register, so this operand stays a MEM. */
1632 if (!safe_insn_predicate (insn_code
, i
, x
))
1634 addr
= force_reg (GET_MODE (addr
), addr
);
1635 x
= replace_equiv_address (x
, addr
);
1640 emit_insn_before (seq
, insn
);
1645 new_rtx
= instantiate_new_reg (x
, &offset
);
1646 if (new_rtx
== NULL
)
1654 /* Careful, special mode predicates may have stuff in
1655 insn_data[insn_code].operand[i].mode that isn't useful
1656 to us for computing a new value. */
1657 /* ??? Recognize address_operand and/or "p" constraints
1658    to see if (plus new offset) is valid before we put
1659 this through expand_simple_binop. */
1660 x
= expand_simple_binop (GET_MODE (x
), PLUS
, new_rtx
,
1661 gen_int_mode (offset
, GET_MODE (x
)),
1662 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
1665 emit_insn_before (seq
, insn
);
1670 new_rtx
= instantiate_new_reg (SUBREG_REG (x
), &offset
);
1671 if (new_rtx
== NULL
)
1676 new_rtx
= expand_simple_binop
1677 (GET_MODE (new_rtx
), PLUS
, new_rtx
,
1678 gen_int_mode (offset
, GET_MODE (new_rtx
)),
1679 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
1682 emit_insn_before (seq
, insn
);
1684 x
= simplify_gen_subreg (recog_data
.operand_mode
[i
], new_rtx
,
1685 GET_MODE (new_rtx
), SUBREG_BYTE (x
));
1693 /* At this point, X contains the new value for the operand.
1694 Validate the new value vs the insn predicate. Note that
1695 asm insns will have insn_code -1 here. */
1696 if (!safe_insn_predicate (insn_code
, i
, x
))
1701 gcc_assert (REGNO (x
) <= LAST_VIRTUAL_REGISTER
);
1702 x
= copy_to_reg (x
);
1705 x
= force_reg (insn_data
[insn_code
].operand
[i
].mode
, x
);
1709 emit_insn_before (seq
, insn
);
1712 *recog_data
.operand_loc
[i
] = recog_data
.operand
[i
] = x
;
1718 /* Propagate operand changes into the duplicates. */
1719 for (i
= 0; i
< recog_data
.n_dups
; ++i
)
1720 *recog_data
.dup_loc
[i
]
1721 = copy_rtx (recog_data
.operand
[(unsigned)recog_data
.dup_num
[i
]]);
1723 /* Force re-recognition of the instruction for validation. */
1724 INSN_CODE (insn
) = -1;
1727 if (asm_noperands (PATTERN (insn
)) >= 0)
1729 if (!check_asm_operands (PATTERN (insn
)))
1731 error_for_asm (insn
, "impossible constraint in %<asm%>");
1732 /* For asm goto, instead of fixing up all the edges
1733 just clear the template and clear input operands
1734 (asm goto doesn't have any output operands). */
1737 rtx asm_op
= extract_asm_operands (PATTERN (insn
));
1738 ASM_OPERANDS_TEMPLATE (asm_op
) = ggc_strdup ("");
1739 ASM_OPERANDS_INPUT_VEC (asm_op
) = rtvec_alloc (0);
1740 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op
) = rtvec_alloc (0);
1748 if (recog_memoized (insn
) < 0)
1749 fatal_insn_not_found (insn
);
1753 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1754 do any instantiation required. */
1757 instantiate_decl_rtl (rtx x
)
1764 /* If this is a CONCAT, recurse for the pieces. */
1765 if (GET_CODE (x
) == CONCAT
)
1767 instantiate_decl_rtl (XEXP (x
, 0));
1768 instantiate_decl_rtl (XEXP (x
, 1));
1772 /* If this is not a MEM, no need to do anything. Similarly if the
1773 address is a constant or a register that is not a virtual register. */
1778 if (CONSTANT_P (addr
)
1780 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
1781 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
1784 for_each_rtx (&XEXP (x
, 0), instantiate_virtual_regs_in_rtx
, NULL
);
1787 /* Helper for instantiate_decls called via walk_tree: Process all decls
1788 in the given DECL_VALUE_EXPR. */
1791 instantiate_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1799 if (DECL_RTL_SET_P (t
))
1800 instantiate_decl_rtl (DECL_RTL (t
));
1801 if (TREE_CODE (t
) == PARM_DECL
&& DECL_NAMELESS (t
)
1802 && DECL_INCOMING_RTL (t
))
1803 instantiate_decl_rtl (DECL_INCOMING_RTL (t
));
1804 if ((TREE_CODE (t
) == VAR_DECL
1805 || TREE_CODE (t
) == RESULT_DECL
)
1806 && DECL_HAS_VALUE_EXPR_P (t
))
1808 tree v
= DECL_VALUE_EXPR (t
);
1809 walk_tree (&v
, instantiate_expr
, NULL
, NULL
);
1816 /* Subroutine of instantiate_decls: Process all decls in the given
1817 BLOCK node and all its subblocks. */
1820 instantiate_decls_1 (tree let
)
1824 for (t
= BLOCK_VARS (let
); t
; t
= DECL_CHAIN (t
))
1826 if (DECL_RTL_SET_P (t
))
1827 instantiate_decl_rtl (DECL_RTL (t
));
1828 if (TREE_CODE (t
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (t
))
1830 tree v
= DECL_VALUE_EXPR (t
);
1831 walk_tree (&v
, instantiate_expr
, NULL
, NULL
);
1835 /* Process all subblocks. */
1836 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= BLOCK_CHAIN (t
))
1837 instantiate_decls_1 (t
);
1840 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1841 all virtual registers in their DECL_RTL's. */
1844 instantiate_decls (tree fndecl
)
1849 /* Process all parameters of the function. */
1850 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= DECL_CHAIN (decl
))
1852 instantiate_decl_rtl (DECL_RTL (decl
));
1853 instantiate_decl_rtl (DECL_INCOMING_RTL (decl
));
1854 if (DECL_HAS_VALUE_EXPR_P (decl
))
1856 tree v
= DECL_VALUE_EXPR (decl
);
1857 walk_tree (&v
, instantiate_expr
, NULL
, NULL
);
1861 if ((decl
= DECL_RESULT (fndecl
))
1862 && TREE_CODE (decl
) == RESULT_DECL
)
1864 if (DECL_RTL_SET_P (decl
))
1865 instantiate_decl_rtl (DECL_RTL (decl
));
1866 if (DECL_HAS_VALUE_EXPR_P (decl
))
1868 tree v
= DECL_VALUE_EXPR (decl
);
1869 walk_tree (&v
, instantiate_expr
, NULL
, NULL
);
1873 /* Now process all variables defined in the function or its subblocks. */
1874 instantiate_decls_1 (DECL_INITIAL (fndecl
));
1876 FOR_EACH_LOCAL_DECL (cfun
, ix
, decl
)
1877 if (DECL_RTL_SET_P (decl
))
1878 instantiate_decl_rtl (DECL_RTL (decl
));
1879 vec_free (cfun
->local_decls
);
1882 /* Pass through the INSNS of function FNDECL and convert virtual register
1883 references to hard register references. */
1886 instantiate_virtual_regs (void)
1890 /* Compute the offsets to use for this function. */
1891 in_arg_offset
= FIRST_PARM_OFFSET (current_function_decl
);
1892 var_offset
= STARTING_FRAME_OFFSET
;
1893 dynamic_offset
= STACK_DYNAMIC_OFFSET (current_function_decl
);
1894 out_arg_offset
= STACK_POINTER_OFFSET
;
1895 #ifdef FRAME_POINTER_CFA_OFFSET
1896 cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
1898 cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
1901 /* Initialize recognition, indicating that volatile is OK. */
1904 /* Scan through all the insns, instantiating every virtual register still
1906 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1909 /* These patterns in the instruction stream can never be recognized.
1910 Fortunately, they shouldn't contain virtual registers either. */
1911 if (GET_CODE (PATTERN (insn
)) == USE
1912 || GET_CODE (PATTERN (insn
)) == CLOBBER
1913 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
1915 else if (DEBUG_INSN_P (insn
))
1916 for_each_rtx (&INSN_VAR_LOCATION (insn
),
1917 instantiate_virtual_regs_in_rtx
, NULL
);
1919 instantiate_virtual_regs_in_insn (insn
);
1921 if (INSN_DELETED_P (insn
))
1924 for_each_rtx (®_NOTES (insn
), instantiate_virtual_regs_in_rtx
, NULL
);
1926 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1928 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn
),
1929 instantiate_virtual_regs_in_rtx
, NULL
);
1932 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1933 instantiate_decls (current_function_decl
);
1935 targetm
.instantiate_decls ();
1937 /* Indicate that, from now on, assign_stack_local should use
1938 frame_pointer_rtx. */
1939 virtuals_instantiated
= 1;
1946 const pass_data pass_data_instantiate_virtual_regs
=
1948 RTL_PASS
, /* type */
1950 OPTGROUP_NONE
, /* optinfo_flags */
1951 false, /* has_gate */
1952 true, /* has_execute */
1953 TV_NONE
, /* tv_id */
1954 0, /* properties_required */
1955 0, /* properties_provided */
1956 0, /* properties_destroyed */
1957 0, /* todo_flags_start */
1958 0, /* todo_flags_finish */
1961 class pass_instantiate_virtual_regs
: public rtl_opt_pass
1964 pass_instantiate_virtual_regs (gcc::context
*ctxt
)
1965 : rtl_opt_pass (pass_data_instantiate_virtual_regs
, ctxt
)
1968 /* opt_pass methods: */
1969 unsigned int execute () { return instantiate_virtual_regs (); }
1971 }; // class pass_instantiate_virtual_regs
1976 make_pass_instantiate_virtual_regs (gcc::context
*ctxt
)
1978 return new pass_instantiate_virtual_regs (ctxt
);
1982 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1983 This means a type for which function calls must pass an address to the
1984 function or get an address back from the function.
1985 EXP may be a type node or an expression (whose type is tested). */
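/* Informally: a plain int is normally returned in a register and is not
   treated as an aggregate here, whereas a TREE_ADDRESSABLE type (for
   instance a C++ class with a nontrivial copy constructor) or a structure
   the target cannot return in call-clobbered registers must be returned
   through a hidden address, so this predicate returns nonzero for it; the
   exact decision is target-dependent, as the checks below show.  */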
1988 aggregate_value_p (const_tree exp
, const_tree fntype
)
1990 const_tree type
= (TYPE_P (exp
)) ? exp
: TREE_TYPE (exp
);
1991 int i
, regno
, nregs
;
1995 switch (TREE_CODE (fntype
))
1999 tree fndecl
= get_callee_fndecl (fntype
);
2001 ? TREE_TYPE (fndecl
)
2002 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype
))));
2006 fntype
= TREE_TYPE (fntype
);
2011 case IDENTIFIER_NODE
:
2015 /* We don't expect other tree types here. */
2019 if (VOID_TYPE_P (type
))
2022 /* If a record should be passed the same as its first (and only) member
2023 don't pass it as an aggregate. */
2024 if (TREE_CODE (type
) == RECORD_TYPE
&& TYPE_TRANSPARENT_AGGR (type
))
2025 return aggregate_value_p (first_field (type
), fntype
);
2027 /* If the front end has decided that this needs to be passed by
2028 reference, do so. */
2029 if ((TREE_CODE (exp
) == PARM_DECL
|| TREE_CODE (exp
) == RESULT_DECL
)
2030 && DECL_BY_REFERENCE (exp
))
2033 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2034 if (fntype
&& TREE_ADDRESSABLE (fntype
))
2037 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2038 and thus can't be returned in registers. */
2039 if (TREE_ADDRESSABLE (type
))
2042 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
2045 /* Pointers-to-shared must be considered as aggregates for
2046 the purpose of passing them as return values, but only
2047 when the underlying mode of the representation would
2048 require that its value be passed on the stack.
2049 This occurs when using the 'struct' representation
2050 of a shared pointer. */
2051 if (flag_pcc_struct_return
&& POINTER_TYPE_P (type
)
2052 && upc_shared_type_p (TREE_TYPE (type
))
2053 && AGGREGATE_TYPE_P (upc_pts_rep_type_node
))
2056 if (targetm
.calls
.return_in_memory (type
, fntype
))
2059 /* Make sure we have suitable call-clobbered regs to return
2060 the value in; if not, we must return it in memory. */
2061 reg
= hard_function_value (type
, 0, fntype
, 0);
2063 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2068 regno
= REGNO (reg
);
2069 nregs
= hard_regno_nregs
[regno
][TYPE_MODE (type
)];
2070 for (i
= 0; i
< nregs
; i
++)
2071 if (! call_used_regs
[regno
+ i
])
2077 /* Return true if we should assign DECL a pseudo register; false if it
2078 should live on the local stack. */
2081 use_register_for_decl (const_tree decl
)
2083 if (!targetm
.calls
.allocate_stack_slots_for_args ())
2086 /* Honor volatile. */
2087 if (TREE_SIDE_EFFECTS (decl
))
2090 /* Honor addressability. */
2091 if (TREE_ADDRESSABLE (decl
))
2094 /* Only register-like things go in registers. */
2095 if (DECL_MODE (decl
) == BLKmode
)
2098 /* If -ffloat-store specified, don't put explicit float variables
2100 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2101 propagates values across these stores, and it probably shouldn't. */
2102 if (flag_float_store
&& FLOAT_TYPE_P (TREE_TYPE (decl
)))
2105 /* If we're not interested in tracking debugging information for
2106 this decl, then we can certainly put it in a register. */
2107 if (DECL_IGNORED_P (decl
))
2113 if (!DECL_REGISTER (decl
))
2116 switch (TREE_CODE (TREE_TYPE (decl
)))
2120 case QUAL_UNION_TYPE
:
2121 /* When not optimizing, disregard register keyword for variables with
2122 types containing methods, otherwise the methods won't be callable
2123 from the debugger. */
2124 if (TYPE_METHODS (TREE_TYPE (decl
)))
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          type = TREE_TYPE (first_field (type));
          mode = TYPE_MODE (type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
                                          type, named_arg);
}
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
                                      named_arg);
}
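
/* Illustrative sketch, not part of GCC: a GNU C transparent union, the kind
   of type GCC marks TYPE_TRANSPARENT_AGGR.  The record branch above and the
   union/record check in assign_parm_find_data_types below both rewrite such
   a parameter to its first member before asking the target hook.  */
#if 0
typedef union
{
  int *ip;
  float *fp;
} arg_t __attribute__ ((__transparent_union__));

void use_arg (arg_t a);   /* queried as if it simply took an int * */
#endif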
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          p = copy_node (p);
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);
          (*args)[i] = p;

          /* Build a second synthetic decl.  */
          decl = build_decl (EXPR_LOCATION (p),
                             PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);
          args->safe_insert (++i, decl);
        }
    }
}
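
/* Illustrative sketch, not part of GCC: what the splitting above amounts to
   at the source level, for a target whose split_complex_arg hook accepts
   complex double.  The second PARM_DECL is the synthetic one built above
   and carries the imaginary part.  */
#if 0
void f (_Complex double z);           /* the parameter list as written */
void f (double z_re, double z_im);    /* the effective list after the split */
#endif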
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
                         PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
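
/* Illustrative sketch, not part of GCC: the hidden ".result_ptr" parameter
   added above, shown as a hypothetical source-level rewrite.  This only
   happens on targets whose struct_value_rtx hook returns 0 and when the
   result is returned in memory.  */
#if 0
struct big { int a[32]; };
struct big f (int x);                    /* what the user wrote */
void f (struct big *result_ptr, int x);  /* effective incoming view;
                                            result_ptr stands in for the
                                            nameless ".result_ptr" decl */
#endif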
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
                                         TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
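
/* Illustrative sketch, not part of GCC: the three modes computed above for
   one parameter, assuming a target whose promote_function_mode hook widens
   sub-word integers (many RISC ABIs do) and that does not already promote
   prototypes (otherwise DECL_ARG_TYPE, and thus passed_mode, may already be
   the widened type).  The promoted mode is entirely target-dependent.  */
#if 0
void f (short s);
/* nominal_mode  = HImode  -- mode of TREE_TYPE (parm)
   passed_mode   = HImode  -- mode of DECL_ARG_TYPE (parm)
   promoted_mode = SImode  -- what the ABI actually passes  */
#endif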
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
2429 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2430 the incoming location of the current parameter. */
2433 assign_parm_find_entry_rtl (struct assign_parm_data_all
*all
,
2434 struct assign_parm_data_one
*data
)
2436 HOST_WIDE_INT pretend_bytes
= 0;
2440 if (data
->promoted_mode
== VOIDmode
)
2442 data
->entry_parm
= data
->stack_parm
= const0_rtx
;
2446 entry_parm
= targetm
.calls
.function_incoming_arg (all
->args_so_far
,
2447 data
->promoted_mode
,
2451 if (entry_parm
== 0)
2452 data
->promoted_mode
= data
->passed_mode
;
2454 /* Determine parm's home in the stack, in case it arrives in the stack
2455 or we should pretend it did. Compute the stack position and rtx where
2456 the argument arrives and its size.
2458 There is one complexity here: If this was a parameter that would
2459 have been passed in registers, but wasn't only because it is
2460 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2461 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2462 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2463 as it was the previous time. */
2464 in_regs
= entry_parm
!= 0;
2465 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2468 if (!in_regs
&& !data
->named_arg
)
2470 if (targetm
.calls
.pretend_outgoing_varargs_named (all
->args_so_far
))
2473 tem
= targetm
.calls
.function_incoming_arg (all
->args_so_far
,
2474 data
->promoted_mode
,
2475 data
->passed_type
, true);
2476 in_regs
= tem
!= NULL
;
2480 /* If this parameter was passed both in registers and in the stack, use
2481 the copy on the stack. */
2482 if (targetm
.calls
.must_pass_in_stack (data
->promoted_mode
,
2490 partial
= targetm
.calls
.arg_partial_bytes (all
->args_so_far
,
2491 data
->promoted_mode
,
2494 data
->partial
= partial
;
2496 /* The caller might already have allocated stack space for the
2497 register parameters. */
2498 if (partial
!= 0 && all
->reg_parm_stack_space
== 0)
2500 /* Part of this argument is passed in registers and part
2501 is passed on the stack. Ask the prologue code to extend
2502 the stack part so that we can recreate the full value.
2504 PRETEND_BYTES is the size of the registers we need to store.
2505 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2506 stack space that the prologue should allocate.
2508 Internally, gcc assumes that the argument pointer is aligned
2509 to STACK_BOUNDARY bits. This is used both for alignment
2510 optimizations (see init_emit) and to locate arguments that are
2511 aligned to more than PARM_BOUNDARY bits. We must preserve this
2512 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2513 a stack boundary. */
2515 /* We assume at most one partial arg, and it must be the first
2516 argument on the stack. */
2517 gcc_assert (!all
->extra_pretend_bytes
&& !all
->pretend_args_size
);
2519 pretend_bytes
= partial
;
2520 all
->pretend_args_size
= CEIL_ROUND (pretend_bytes
, STACK_BYTES
);
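
/* Worked example, illustrative only: assuming a 128-bit STACK_BOUNDARY,
   STACK_BYTES is 16.  If 12 bytes of the argument arrived in registers,
   pretend_bytes == 12 and CEIL_ROUND (12, 16) == (12 + 15) & ~15 == 16,
   so the prologue is asked to allocate one full 16-byte slot.  */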
2522 /* We want to align relative to the actual stack pointer, so
2523 don't include this in the stack size until later. */
2524 all
->extra_pretend_bytes
= all
->pretend_args_size
;
2528 locate_and_pad_parm (data
->promoted_mode
, data
->passed_type
, in_regs
,
2529 entry_parm
? data
->partial
: 0, current_function_decl
,
2530 &all
->stack_args_size
, &data
->locate
);
2532 /* Update parm_stack_boundary if this parameter is passed in the
2534 if (!in_regs
&& crtl
->parm_stack_boundary
< data
->locate
.boundary
)
2535 crtl
->parm_stack_boundary
= data
->locate
.boundary
;
2537 /* Adjust offsets to include the pretend args. */
2538 pretend_bytes
= all
->extra_pretend_bytes
- pretend_bytes
;
2539 data
->locate
.slot_offset
.constant
+= pretend_bytes
;
2540 data
->locate
.offset
.constant
+= pretend_bytes
;
2542 data
->entry_parm
= entry_parm
;
2545 /* A subroutine of assign_parms. If there is actually space on the stack
2546 for this parm, count it in stack_args_size and return true. */
2549 assign_parm_is_stack_parm (struct assign_parm_data_all
*all
,
2550 struct assign_parm_data_one
*data
)
2552 /* Trivially true if we've no incoming register. */
2553 if (data
->entry_parm
== NULL
)
2555 /* Also true if we're partially in registers and partially not,
2556 since we've arranged to drop the entire argument on the stack. */
2557 else if (data
->partial
!= 0)
2559 /* Also true if the target says that it's passed in both registers
2560 and on the stack. */
2561 else if (GET_CODE (data
->entry_parm
) == PARALLEL
2562 && XEXP (XVECEXP (data
->entry_parm
, 0, 0), 0) == NULL_RTX
)
2564 /* Also true if the target says that there's stack allocated for
2565 all register parameters. */
2566 else if (all
->reg_parm_stack_space
> 0)
2568 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2572 all
->stack_args_size
.constant
+= data
->locate
.size
.constant
;
2573 if (data
->locate
.size
.var
)
2574 ADD_PARM_SIZE (all
->stack_args_size
, data
->locate
.size
.var
);
2579 /* A subroutine of assign_parms. Given that this parameter is allocated
2580 stack space by the ABI, find it. */
2583 assign_parm_find_stack_rtl (tree parm
, struct assign_parm_data_one
*data
)
2585 rtx offset_rtx
, stack_parm
;
2586 unsigned int align
, boundary
;
2588 /* If we're passing this arg using a reg, make its stack home the
2589 aligned stack slot. */
2590 if (data
->entry_parm
)
2591 offset_rtx
= ARGS_SIZE_RTX (data
->locate
.slot_offset
);
2593 offset_rtx
= ARGS_SIZE_RTX (data
->locate
.offset
);
2595 stack_parm
= crtl
->args
.internal_arg_pointer
;
2596 if (offset_rtx
!= const0_rtx
)
2597 stack_parm
= gen_rtx_PLUS (Pmode
, stack_parm
, offset_rtx
);
2598 stack_parm
= gen_rtx_MEM (data
->promoted_mode
, stack_parm
);
2600 if (!data
->passed_pointer
)
2602 set_mem_attributes (stack_parm
, parm
, 1);
2603 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2604 while promoted mode's size is needed. */
2605 if (data
->promoted_mode
!= BLKmode
2606 && data
->promoted_mode
!= DECL_MODE (parm
))
2608 set_mem_size (stack_parm
, GET_MODE_SIZE (data
->promoted_mode
));
2609 if (MEM_EXPR (stack_parm
) && MEM_OFFSET_KNOWN_P (stack_parm
))
2611 int offset
= subreg_lowpart_offset (DECL_MODE (parm
),
2612 data
->promoted_mode
);
2614 set_mem_offset (stack_parm
, MEM_OFFSET (stack_parm
) - offset
);
2619 boundary
= data
->locate
.boundary
;
2620 align
= BITS_PER_UNIT
;
2622 /* If we're padding upward, we know that the alignment of the slot
2623 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2624 intentionally forcing upward padding. Otherwise we have to come
2625 up with a guess at the alignment based on OFFSET_RTX. */
2626 if (data
->locate
.where_pad
!= downward
|| data
->entry_parm
)
2628 else if (CONST_INT_P (offset_rtx
))
2630 align
= INTVAL (offset_rtx
) * BITS_PER_UNIT
| boundary
;
2631 align
= align
& -align
;
2633 set_mem_align (stack_parm
, align
);
2635 if (data
->entry_parm
)
2636 set_reg_attrs_for_parm (data
->entry_parm
, stack_parm
);
2638 data
->stack_parm
= stack_parm
;
2641 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2642 always valid and contiguous. */
2645 assign_parm_adjust_entry_rtl (struct assign_parm_data_one
*data
)
2647 rtx entry_parm
= data
->entry_parm
;
2648 rtx stack_parm
= data
->stack_parm
;
2650 /* If this parm was passed part in regs and part in memory, pretend it
2651 arrived entirely in memory by pushing the register-part onto the stack.
2652 In the special case of a DImode or DFmode that is split, we could put
2653 it together in a pseudoreg directly, but for now that's not worth
2655 if (data
->partial
!= 0)
2657 /* Handle calls that pass values in multiple non-contiguous
2658 locations. The Irix 6 ABI has examples of this. */
2659 if (GET_CODE (entry_parm
) == PARALLEL
)
2660 emit_group_store (validize_mem (stack_parm
), entry_parm
,
2662 int_size_in_bytes (data
->passed_type
));
2665 gcc_assert (data
->partial
% UNITS_PER_WORD
== 0);
2666 move_block_from_reg (REGNO (entry_parm
), validize_mem (stack_parm
),
2667 data
->partial
/ UNITS_PER_WORD
);
2670 entry_parm
= stack_parm
;
2673 /* If we didn't decide this parm came in a register, by default it came
2675 else if (entry_parm
== NULL
)
2676 entry_parm
= stack_parm
;
2678 /* When an argument is passed in multiple locations, we can't make use
2679 of this information, but we can save some copying if the whole argument
2680 is passed in a single register. */
2681 else if (GET_CODE (entry_parm
) == PARALLEL
2682 && data
->nominal_mode
!= BLKmode
2683 && data
->passed_mode
!= BLKmode
)
2685 size_t i
, len
= XVECLEN (entry_parm
, 0);
2687 for (i
= 0; i
< len
; i
++)
2688 if (XEXP (XVECEXP (entry_parm
, 0, i
), 0) != NULL_RTX
2689 && REG_P (XEXP (XVECEXP (entry_parm
, 0, i
), 0))
2690 && (GET_MODE (XEXP (XVECEXP (entry_parm
, 0, i
), 0))
2691 == data
->passed_mode
)
2692 && INTVAL (XEXP (XVECEXP (entry_parm
, 0, i
), 1)) == 0)
2694 entry_parm
= XEXP (XVECEXP (entry_parm
, 0, i
), 0);
2699 data
->entry_parm
= entry_parm
;
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
                        GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
2725 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2726 always valid and properly aligned. */
2729 assign_parm_adjust_stack_rtl (struct assign_parm_data_one
*data
)
2731 rtx stack_parm
= data
->stack_parm
;
2733 /* If we can't trust the parm stack slot to be aligned enough for its
2734 ultimate type, don't use that slot after entry. We'll make another
2735 stack slot, if we need one. */
2737 && ((STRICT_ALIGNMENT
2738 && GET_MODE_ALIGNMENT (data
->nominal_mode
) > MEM_ALIGN (stack_parm
))
2739 || (data
->nominal_type
2740 && TYPE_ALIGN (data
->nominal_type
) > MEM_ALIGN (stack_parm
)
2741 && MEM_ALIGN (stack_parm
) < PREFERRED_STACK_BOUNDARY
)))
2744 /* If parm was passed in memory, and we need to convert it on entry,
2745 don't store it back in that same slot. */
2746 else if (data
->entry_parm
== stack_parm
2747 && data
->nominal_mode
!= BLKmode
2748 && data
->nominal_mode
!= data
->passed_mode
)
2751 /* If stack protection is in effect for this function, don't leave any
2752 pointers in their passed stack slots. */
2753 else if (crtl
->stack_protect_guard
2754 && (flag_stack_protect
== 2
2755 || data
->passed_pointer
2756 || POINTER_TYPE_P (data
->nominal_type
)))
2759 data
->stack_parm
= stack_parm
;
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
2786 /* A subroutine of assign_parms. Arrange for the parameter to be
2787 present and valid in DATA->STACK_RTL. */
2790 assign_parm_setup_block (struct assign_parm_data_all
*all
,
2791 tree parm
, struct assign_parm_data_one
*data
)
2793 rtx entry_parm
= data
->entry_parm
;
2794 rtx stack_parm
= data
->stack_parm
;
2796 HOST_WIDE_INT size_stored
;
2798 if (GET_CODE (entry_parm
) == PARALLEL
)
2799 entry_parm
= emit_group_move_into_temps (entry_parm
);
2801 size
= int_size_in_bytes (data
->passed_type
);
2802 size_stored
= CEIL_ROUND (size
, UNITS_PER_WORD
);
2803 if (stack_parm
== 0)
2805 DECL_ALIGN (parm
) = MAX (DECL_ALIGN (parm
), BITS_PER_WORD
);
2806 stack_parm
= assign_stack_local (BLKmode
, size_stored
,
2808 if (GET_MODE_SIZE (GET_MODE (entry_parm
)) == size
)
2809 PUT_MODE (stack_parm
, GET_MODE (entry_parm
));
2810 set_mem_attributes (stack_parm
, parm
, 1);
2813 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2814 calls that pass values in multiple non-contiguous locations. */
2815 if (REG_P (entry_parm
) || GET_CODE (entry_parm
) == PARALLEL
)
2819 /* Note that we will be storing an integral number of words.
2820 So we have to be careful to ensure that we allocate an
2821 integral number of words. We do this above when we call
2822 assign_stack_local if space was not allocated in the argument
2823 list. If it was, this will not work if PARM_BOUNDARY is not
2824 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2825 if it becomes a problem. Exception is when BLKmode arrives
2826 with arguments not conforming to word_mode. */
2828 if (data
->stack_parm
== 0)
2830 else if (GET_CODE (entry_parm
) == PARALLEL
)
2833 gcc_assert (!size
|| !(PARM_BOUNDARY
% BITS_PER_WORD
));
2835 mem
= validize_mem (stack_parm
);
2837 /* Handle values in multiple non-contiguous locations. */
2838 if (GET_CODE (entry_parm
) == PARALLEL
)
2840 push_to_sequence2 (all
->first_conversion_insn
,
2841 all
->last_conversion_insn
);
2842 emit_group_store (mem
, entry_parm
, data
->passed_type
, size
);
2843 all
->first_conversion_insn
= get_insns ();
2844 all
->last_conversion_insn
= get_last_insn ();
2851 /* If SIZE is that of a mode no bigger than a word, just use
2852 that mode's store operation. */
2853 else if (size
<= UNITS_PER_WORD
)
2855 enum machine_mode mode
2856 = mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0);
2859 #ifdef BLOCK_REG_PADDING
2860 && (size
== UNITS_PER_WORD
2861 || (BLOCK_REG_PADDING (mode
, data
->passed_type
, 1)
2862 != (BYTES_BIG_ENDIAN
? upward
: downward
)))
2868 /* We are really truncating a word_mode value containing
2869 SIZE bytes into a value of mode MODE. If such an
2870 operation requires no actual instructions, we can refer
2871 to the value directly in mode MODE, otherwise we must
2872 start with the register in word_mode and explicitly
2874 if (TRULY_NOOP_TRUNCATION (size
* BITS_PER_UNIT
, BITS_PER_WORD
))
2875 reg
= gen_rtx_REG (mode
, REGNO (entry_parm
));
2878 reg
= gen_rtx_REG (word_mode
, REGNO (entry_parm
));
2879 reg
= convert_to_mode (mode
, copy_to_reg (reg
), 1);
2881 emit_move_insn (change_address (mem
, mode
, 0), reg
);
2884 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2885 machine must be aligned to the left before storing
2886 to memory. Note that the previous test doesn't
2887 handle all cases (e.g. SIZE == 3). */
2888 else if (size
!= UNITS_PER_WORD
2889 #ifdef BLOCK_REG_PADDING
2890 && (BLOCK_REG_PADDING (mode
, data
->passed_type
, 1)
2898 int by
= (UNITS_PER_WORD
- size
) * BITS_PER_UNIT
;
2899 rtx reg
= gen_rtx_REG (word_mode
, REGNO (entry_parm
));
2901 x
= expand_shift (LSHIFT_EXPR
, word_mode
, reg
, by
, NULL_RTX
, 1);
2902 tem
= change_address (mem
, word_mode
, 0);
2903 emit_move_insn (tem
, x
);
2906 move_block_from_reg (REGNO (entry_parm
), mem
,
2907 size_stored
/ UNITS_PER_WORD
);
2910 move_block_from_reg (REGNO (entry_parm
), mem
,
2911 size_stored
/ UNITS_PER_WORD
);
2913 else if (data
->stack_parm
== 0)
2915 push_to_sequence2 (all
->first_conversion_insn
, all
->last_conversion_insn
);
2916 emit_block_move (stack_parm
, data
->entry_parm
, GEN_INT (size
),
2918 all
->first_conversion_insn
= get_insns ();
2919 all
->last_conversion_insn
= get_last_insn ();
2923 data
->stack_parm
= stack_parm
;
2924 SET_DECL_RTL (parm
, stack_parm
);
2927 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2928 parameter. Get it there. Perform all ABI specified conversions. */
2931 assign_parm_setup_reg (struct assign_parm_data_all
*all
, tree parm
,
2932 struct assign_parm_data_one
*data
)
2934 rtx parmreg
, validated_mem
;
2935 rtx equiv_stack_parm
;
2936 enum machine_mode promoted_nominal_mode
;
2937 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (parm
));
2938 bool did_conversion
= false;
2939 bool need_conversion
, moved
;
2941 /* Store the parm in a pseudoregister during the function, but we may
2942 need to do it in a wider mode. Using 2 here makes the result
2943 consistent with promote_decl_mode and thus expand_expr_real_1. */
2944 promoted_nominal_mode
2945 = promote_function_mode (data
->nominal_type
, data
->nominal_mode
, &unsignedp
,
2946 TREE_TYPE (current_function_decl
), 2);
2948 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
2950 if (!DECL_ARTIFICIAL (parm
))
2951 mark_user_reg (parmreg
);
2953 /* If this was an item that we received a pointer to,
2954 set DECL_RTL appropriately. */
2955 if (data
->passed_pointer
)
2957 rtx x
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data
->passed_type
)), parmreg
);
2958 set_mem_attributes (x
, parm
, 1);
2959 SET_DECL_RTL (parm
, x
);
2962 SET_DECL_RTL (parm
, parmreg
);
2964 assign_parm_remove_parallels (data
);
2966 /* Copy the value into the register, thus bridging between
2967 assign_parm_find_data_types and expand_expr_real_1. */
2969 equiv_stack_parm
= data
->stack_parm
;
2970 validated_mem
= validize_mem (data
->entry_parm
);
2972 need_conversion
= (data
->nominal_mode
!= data
->passed_mode
2973 || promoted_nominal_mode
!= data
->promoted_mode
);
2977 && GET_MODE_CLASS (data
->nominal_mode
) == MODE_INT
2978 && data
->nominal_mode
== data
->passed_mode
2979 && data
->nominal_mode
== GET_MODE (data
->entry_parm
))
2981 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2982 mode, by the caller. We now have to convert it to
2983 NOMINAL_MODE, if different. However, PARMREG may be in
2984 a different mode than NOMINAL_MODE if it is being stored
2987 If ENTRY_PARM is a hard register, it might be in a register
2988 not valid for operating in its mode (e.g., an odd-numbered
2989 register for a DFmode). In that case, moves are the only
2990 thing valid, so we can't do a convert from there. This
2991 occurs when the calling sequence allow such misaligned
2994 In addition, the conversion may involve a call, which could
2995 clobber parameters which haven't been copied to pseudo
2998 First, we try to emit an insn which performs the necessary
2999 conversion. We verify that this insn does not clobber any
3002 enum insn_code icode
;
3005 icode
= can_extend_p (promoted_nominal_mode
, data
->passed_mode
,
3009 op1
= validated_mem
;
3010 if (icode
!= CODE_FOR_nothing
3011 && insn_operand_matches (icode
, 0, op0
)
3012 && insn_operand_matches (icode
, 1, op1
))
3014 enum rtx_code code
= unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
;
3015 rtx insn
, insns
, t
= op1
;
3016 HARD_REG_SET hardregs
;
3019 /* If op1 is a hard register that is likely spilled, first
3020 force it into a pseudo, otherwise combiner might extend
3021 its lifetime too much. */
3022 if (GET_CODE (t
) == SUBREG
)
3025 && HARD_REGISTER_P (t
)
3026 && ! TEST_HARD_REG_BIT (fixed_reg_set
, REGNO (t
))
3027 && targetm
.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t
))))
3029 t
= gen_reg_rtx (GET_MODE (op1
));
3030 emit_move_insn (t
, op1
);
3034 insn
= gen_extend_insn (op0
, t
, promoted_nominal_mode
,
3035 data
->passed_mode
, unsignedp
);
3037 insns
= get_insns ();
3040 CLEAR_HARD_REG_SET (hardregs
);
3041 for (insn
= insns
; insn
&& moved
; insn
= NEXT_INSN (insn
))
3044 note_stores (PATTERN (insn
), record_hard_reg_sets
,
3046 if (!hard_reg_set_empty_p (hardregs
))
3055 if (equiv_stack_parm
!= NULL_RTX
)
3056 equiv_stack_parm
= gen_rtx_fmt_e (code
, GET_MODE (parmreg
),
3063 /* Nothing to do. */
3065 else if (need_conversion
)
3067 /* We did not have an insn to convert directly, or the sequence
3068 generated appeared unsafe. We must first copy the parm to a
3069 pseudo reg, and save the conversion until after all
3070 parameters have been moved. */
3073 rtx tempreg
= gen_reg_rtx (GET_MODE (data
->entry_parm
));
3075 emit_move_insn (tempreg
, validated_mem
);
3077 push_to_sequence2 (all
->first_conversion_insn
, all
->last_conversion_insn
);
3078 tempreg
= convert_to_mode (data
->nominal_mode
, tempreg
, unsignedp
);
3080 if (GET_CODE (tempreg
) == SUBREG
3081 && GET_MODE (tempreg
) == data
->nominal_mode
3082 && REG_P (SUBREG_REG (tempreg
))
3083 && data
->nominal_mode
== data
->passed_mode
3084 && GET_MODE (SUBREG_REG (tempreg
)) == GET_MODE (data
->entry_parm
)
3085 && GET_MODE_SIZE (GET_MODE (tempreg
))
3086 < GET_MODE_SIZE (GET_MODE (data
->entry_parm
)))
3088 /* The argument is already sign/zero extended, so note it
3090 SUBREG_PROMOTED_VAR_P (tempreg
) = 1;
3091 SUBREG_PROMOTED_UNSIGNED_SET (tempreg
, unsignedp
);
3094 /* TREE_USED gets set erroneously during expand_assignment. */
3095 save_tree_used
= TREE_USED (parm
);
3096 expand_assignment (parm
, make_tree (data
->nominal_type
, tempreg
), false);
3097 TREE_USED (parm
) = save_tree_used
;
3098 all
->first_conversion_insn
= get_insns ();
3099 all
->last_conversion_insn
= get_last_insn ();
3102 did_conversion
= true;
3105 emit_move_insn (parmreg
, validated_mem
);
3107 /* If we were passed a pointer but the actual value can safely live
3108 in a register, retrieve it and use it directly. */
3109 if (data
->passed_pointer
&& TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
)
3111 /* We can't use nominal_mode, because it will have been set to
3112 Pmode above. We must use the actual mode of the parm. */
3113 if (use_register_for_decl (parm
))
3115 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
3116 mark_user_reg (parmreg
);
3120 int align
= STACK_SLOT_ALIGNMENT (TREE_TYPE (parm
),
3121 TYPE_MODE (TREE_TYPE (parm
)),
3122 TYPE_ALIGN (TREE_TYPE (parm
)));
3124 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm
)),
3125 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm
))),
3127 set_mem_attributes (parmreg
, parm
, 1);
3130 if (GET_MODE (parmreg
) != GET_MODE (DECL_RTL (parm
)))
3132 rtx tempreg
= gen_reg_rtx (GET_MODE (DECL_RTL (parm
)));
3133 int unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (parm
));
3135 push_to_sequence2 (all
->first_conversion_insn
,
3136 all
->last_conversion_insn
);
3137 emit_move_insn (tempreg
, DECL_RTL (parm
));
3138 tempreg
= convert_to_mode (GET_MODE (parmreg
), tempreg
, unsigned_p
);
3139 emit_move_insn (parmreg
, tempreg
);
3140 all
->first_conversion_insn
= get_insns ();
3141 all
->last_conversion_insn
= get_last_insn ();
3144 did_conversion
= true;
3147 emit_move_insn (parmreg
, DECL_RTL (parm
));
3149 SET_DECL_RTL (parm
, parmreg
);
3151 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3153 data
->stack_parm
= NULL
;
3156 /* Mark the register as eliminable if we did no conversion and it was
3157 copied from memory at a fixed offset, and the arg pointer was not
3158 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3159 offset formed an invalid address, such memory-equivalences as we
3160 make here would screw up life analysis for it. */
3161 if (data
->nominal_mode
== data
->passed_mode
3163 && data
->stack_parm
!= 0
3164 && MEM_P (data
->stack_parm
)
3165 && data
->locate
.offset
.var
== 0
3166 && reg_mentioned_p (virtual_incoming_args_rtx
,
3167 XEXP (data
->stack_parm
, 0)))
3169 rtx linsn
= get_last_insn ();
3172 /* Mark complex types separately. */
3173 if (GET_CODE (parmreg
) == CONCAT
)
3175 enum machine_mode submode
3176 = GET_MODE_INNER (GET_MODE (parmreg
));
3177 int regnor
= REGNO (XEXP (parmreg
, 0));
3178 int regnoi
= REGNO (XEXP (parmreg
, 1));
3179 rtx stackr
= adjust_address_nv (data
->stack_parm
, submode
, 0);
3180 rtx stacki
= adjust_address_nv (data
->stack_parm
, submode
,
3181 GET_MODE_SIZE (submode
));
3183 /* Scan backwards for the set of the real and
3185 for (sinsn
= linsn
; sinsn
!= 0;
3186 sinsn
= prev_nonnote_insn (sinsn
))
3188 set
= single_set (sinsn
);
3192 if (SET_DEST (set
) == regno_reg_rtx
[regnoi
])
3193 set_unique_reg_note (sinsn
, REG_EQUIV
, stacki
);
3194 else if (SET_DEST (set
) == regno_reg_rtx
[regnor
])
3195 set_unique_reg_note (sinsn
, REG_EQUIV
, stackr
);
3199 set_dst_reg_note (linsn
, REG_EQUIV
, equiv_stack_parm
, parmreg
);
3202 /* For pointer data type, suggest pointer register. */
3203 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
3204 mark_reg_pointer (parmreg
,
3205 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
))));
3208 /* A subroutine of assign_parms. Allocate stack space to hold the current
3209 parameter. Get it there. Perform all ABI specified conversions. */
3212 assign_parm_setup_stack (struct assign_parm_data_all
*all
, tree parm
,
3213 struct assign_parm_data_one
*data
)
3215 /* Value must be stored in the stack slot STACK_PARM during function
3217 bool to_conversion
= false;
3219 assign_parm_remove_parallels (data
);
3221 if (data
->promoted_mode
!= data
->nominal_mode
)
3223 /* Conversion is required. */
3224 rtx tempreg
= gen_reg_rtx (GET_MODE (data
->entry_parm
));
3226 emit_move_insn (tempreg
, validize_mem (data
->entry_parm
));
3228 push_to_sequence2 (all
->first_conversion_insn
, all
->last_conversion_insn
);
3229 to_conversion
= true;
3231 data
->entry_parm
= convert_to_mode (data
->nominal_mode
, tempreg
,
3232 TYPE_UNSIGNED (TREE_TYPE (parm
)));
3234 if (data
->stack_parm
)
3236 int offset
= subreg_lowpart_offset (data
->nominal_mode
,
3237 GET_MODE (data
->stack_parm
));
3238 /* ??? This may need a big-endian conversion on sparc64. */
3240 = adjust_address (data
->stack_parm
, data
->nominal_mode
, 0);
3241 if (offset
&& MEM_OFFSET_KNOWN_P (data
->stack_parm
))
3242 set_mem_offset (data
->stack_parm
,
3243 MEM_OFFSET (data
->stack_parm
) + offset
);
3247 if (data
->entry_parm
!= data
->stack_parm
)
3251 if (data
->stack_parm
== 0)
3253 int align
= STACK_SLOT_ALIGNMENT (data
->passed_type
,
3254 GET_MODE (data
->entry_parm
),
3255 TYPE_ALIGN (data
->passed_type
));
3257 = assign_stack_local (GET_MODE (data
->entry_parm
),
3258 GET_MODE_SIZE (GET_MODE (data
->entry_parm
)),
3260 set_mem_attributes (data
->stack_parm
, parm
, 1);
3263 dest
= validize_mem (data
->stack_parm
);
3264 src
= validize_mem (data
->entry_parm
);
3268 /* Use a block move to handle potentially misaligned entry_parm. */
3270 push_to_sequence2 (all
->first_conversion_insn
,
3271 all
->last_conversion_insn
);
3272 to_conversion
= true;
3274 emit_block_move (dest
, src
,
3275 GEN_INT (int_size_in_bytes (data
->passed_type
)),
3279 emit_move_insn (dest
, src
);
3284 all
->first_conversion_insn
= get_insns ();
3285 all
->last_conversion_insn
= get_last_insn ();
3289 SET_DECL_RTL (parm
, data
->stack_parm
);
3292 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3293 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3296 assign_parms_unsplit_complex (struct assign_parm_data_all
*all
,
3300 tree orig_fnargs
= all
->orig_fnargs
;
3303 for (parm
= orig_fnargs
; parm
; parm
= TREE_CHAIN (parm
), ++i
)
3305 if (TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
3306 && targetm
.calls
.split_complex_arg (TREE_TYPE (parm
)))
3308 rtx tmp
, real
, imag
;
3309 enum machine_mode inner
= GET_MODE_INNER (DECL_MODE (parm
));
3311 real
= DECL_RTL (fnargs
[i
]);
3312 imag
= DECL_RTL (fnargs
[i
+ 1]);
3313 if (inner
!= GET_MODE (real
))
3315 real
= gen_lowpart_SUBREG (inner
, real
);
3316 imag
= gen_lowpart_SUBREG (inner
, imag
);
3319 if (TREE_ADDRESSABLE (parm
))
3322 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (parm
));
3323 int align
= STACK_SLOT_ALIGNMENT (TREE_TYPE (parm
),
3325 TYPE_ALIGN (TREE_TYPE (parm
)));
3327 /* split_complex_arg put the real and imag parts in
3328 pseudos. Move them to memory. */
3329 tmp
= assign_stack_local (DECL_MODE (parm
), size
, align
);
3330 set_mem_attributes (tmp
, parm
, 1);
3331 rmem
= adjust_address_nv (tmp
, inner
, 0);
3332 imem
= adjust_address_nv (tmp
, inner
, GET_MODE_SIZE (inner
));
3333 push_to_sequence2 (all
->first_conversion_insn
,
3334 all
->last_conversion_insn
);
3335 emit_move_insn (rmem
, real
);
3336 emit_move_insn (imem
, imag
);
3337 all
->first_conversion_insn
= get_insns ();
3338 all
->last_conversion_insn
= get_last_insn ();
3342 tmp
= gen_rtx_CONCAT (DECL_MODE (parm
), real
, imag
);
3343 SET_DECL_RTL (parm
, tmp
);
3345 real
= DECL_INCOMING_RTL (fnargs
[i
]);
3346 imag
= DECL_INCOMING_RTL (fnargs
[i
+ 1]);
3347 if (inner
!= GET_MODE (real
))
3349 real
= gen_lowpart_SUBREG (inner
, real
);
3350 imag
= gen_lowpart_SUBREG (inner
, imag
);
3352 tmp
= gen_rtx_CONCAT (DECL_MODE (parm
), real
, imag
);
3353 set_decl_incoming_rtl (parm
, tmp
, false);
3359 /* Assign RTL expressions to the function's parameters. This may involve
3360 copying them into registers and using those registers as the DECL_RTL. */
3363 assign_parms (tree fndecl
)
3365 struct assign_parm_data_all all
;
3370 crtl
->args
.internal_arg_pointer
3371 = targetm
.calls
.internal_arg_pointer ();
3373 assign_parms_initialize_all (&all
);
3374 fnargs
= assign_parms_augmented_arg_list (&all
);
3376 FOR_EACH_VEC_ELT (fnargs
, i
, parm
)
3378 struct assign_parm_data_one data
;
3380 /* Extract the type of PARM; adjust it according to ABI. */
3381 assign_parm_find_data_types (&all
, parm
, &data
);
3383 /* Early out for errors and void parameters. */
3384 if (data
.passed_mode
== VOIDmode
)
3386 SET_DECL_RTL (parm
, const0_rtx
);
3387 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
);
3391 /* Estimate stack alignment from parameter alignment. */
3392 if (SUPPORTS_STACK_ALIGNMENT
)
3395 = targetm
.calls
.function_arg_boundary (data
.promoted_mode
,
3397 align
= MINIMUM_ALIGNMENT (data
.passed_type
, data
.promoted_mode
,
3399 if (TYPE_ALIGN (data
.nominal_type
) > align
)
3400 align
= MINIMUM_ALIGNMENT (data
.nominal_type
,
3401 TYPE_MODE (data
.nominal_type
),
3402 TYPE_ALIGN (data
.nominal_type
));
3403 if (crtl
->stack_alignment_estimated
< align
)
3405 gcc_assert (!crtl
->stack_realign_processed
);
3406 crtl
->stack_alignment_estimated
= align
;
3410 if (cfun
->stdarg
&& !DECL_CHAIN (parm
))
3411 assign_parms_setup_varargs (&all
, &data
, false);
3413 /* Find out where the parameter arrives in this function. */
3414 assign_parm_find_entry_rtl (&all
, &data
);
3416 /* Find out where stack space for this parameter might be. */
3417 if (assign_parm_is_stack_parm (&all
, &data
))
3419 assign_parm_find_stack_rtl (parm
, &data
);
3420 assign_parm_adjust_entry_rtl (&data
);
3423 /* Record permanently how this parm was passed. */
3424 if (data
.passed_pointer
)
3427 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data
.passed_type
)),
3429 set_decl_incoming_rtl (parm
, incoming_rtl
, true);
3432 set_decl_incoming_rtl (parm
, data
.entry_parm
, false);
3434 /* Update info on where next arg arrives in registers. */
3435 targetm
.calls
.function_arg_advance (all
.args_so_far
, data
.promoted_mode
,
3436 data
.passed_type
, data
.named_arg
);
3438 assign_parm_adjust_stack_rtl (&data
);
3440 if (assign_parm_setup_block_p (&data
))
3441 assign_parm_setup_block (&all
, parm
, &data
);
3442 else if (data
.passed_pointer
|| use_register_for_decl (parm
))
3443 assign_parm_setup_reg (&all
, parm
, &data
);
3445 assign_parm_setup_stack (&all
, parm
, &data
);
3448 if (targetm
.calls
.split_complex_arg
)
3449 assign_parms_unsplit_complex (&all
, fnargs
);
3453 /* Output all parameter conversion instructions (possibly including calls)
3454 now that all parameters have been copied out of hard registers. */
3455 emit_insn (all
.first_conversion_insn
);
3457 /* Estimate reload stack alignment from scalar return mode. */
3458 if (SUPPORTS_STACK_ALIGNMENT
)
3460 if (DECL_RESULT (fndecl
))
3462 tree type
= TREE_TYPE (DECL_RESULT (fndecl
));
3463 enum machine_mode mode
= TYPE_MODE (type
);
3467 && !AGGREGATE_TYPE_P (type
))
3469 unsigned int align
= GET_MODE_ALIGNMENT (mode
);
3470 if (crtl
->stack_alignment_estimated
< align
)
3472 gcc_assert (!crtl
->stack_realign_processed
);
3473 crtl
->stack_alignment_estimated
= align
;
3479 /* If we are receiving a struct value address as the first argument, set up
3480 the RTL for the function result. As this might require code to convert
3481 the transmitted address to Pmode, we do this here to ensure that possible
3482 preliminary conversions of the address have been emitted already. */
3483 if (all
.function_result_decl
)
3485 tree result
= DECL_RESULT (current_function_decl
);
3486 rtx addr
= DECL_RTL (all
.function_result_decl
);
3489 if (DECL_BY_REFERENCE (result
))
3491 SET_DECL_VALUE_EXPR (result
, all
.function_result_decl
);
3496 SET_DECL_VALUE_EXPR (result
,
3497 build1 (INDIRECT_REF
, TREE_TYPE (result
),
3498 all
.function_result_decl
));
3499 addr
= convert_memory_address (Pmode
, addr
);
3500 x
= gen_rtx_MEM (DECL_MODE (result
), addr
);
3501 set_mem_attributes (x
, result
, 1);
3504 DECL_HAS_VALUE_EXPR_P (result
) = 1;
3506 SET_DECL_RTL (result
, x
);
3509 /* We have aligned all the args, so add space for the pretend args. */
3510 crtl
->args
.pretend_args_size
= all
.pretend_args_size
;
3511 all
.stack_args_size
.constant
+= all
.extra_pretend_bytes
;
3512 crtl
->args
.size
= all
.stack_args_size
.constant
;
3514 /* Adjust function incoming argument size for alignment and
3517 #ifdef REG_PARM_STACK_SPACE
3518 crtl
->args
.size
= MAX (crtl
->args
.size
,
3519 REG_PARM_STACK_SPACE (fndecl
));
3522 crtl
->args
.size
= CEIL_ROUND (crtl
->args
.size
,
3523 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3525 #ifdef ARGS_GROW_DOWNWARD
3526 crtl
->args
.arg_offset_rtx
3527 = (all
.stack_args_size
.var
== 0 ? GEN_INT (-all
.stack_args_size
.constant
)
3528 : expand_expr (size_diffop (all
.stack_args_size
.var
,
3529 size_int (-all
.stack_args_size
.constant
)),
3530 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
));
3532 crtl
->args
.arg_offset_rtx
= ARGS_SIZE_RTX (all
.stack_args_size
);
3535 /* See how many bytes, if any, of its args a function should try to pop
3538 crtl
->args
.pops_args
= targetm
.calls
.return_pops_args (fndecl
,
3542 /* For stdarg.h function, save info about
3543 regs and stack space used by the named args. */
3545 crtl
->args
.info
= all
.args_so_far_v
;
3547 /* Set the rtx used for the function return value. Put this in its
3548 own variable so any optimizers that need this information don't have
3549 to include tree.h. Do this here so it gets done when an inlined
3550 function gets output. */
3553 = (DECL_RTL_SET_P (DECL_RESULT (fndecl
))
3554 ? DECL_RTL (DECL_RESULT (fndecl
)) : NULL_RTX
);
3556 /* If scalar return value was computed in a pseudo-reg, or was a named
3557 return value that got dumped to the stack, copy that to the hard
3559 if (DECL_RTL_SET_P (DECL_RESULT (fndecl
)))
3561 tree decl_result
= DECL_RESULT (fndecl
);
3562 rtx decl_rtl
= DECL_RTL (decl_result
);
3564 if (REG_P (decl_rtl
)
3565 ? REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
3566 : DECL_REGISTER (decl_result
))
3570 real_decl_rtl
= targetm
.calls
.function_value (TREE_TYPE (decl_result
),
3572 REG_FUNCTION_VALUE_P (real_decl_rtl
) = 1;
3573 /* The delay slot scheduler assumes that crtl->return_rtx
3574 holds the hard register containing the return value, not a
3575 temporary pseudo. */
3576 crtl
->return_rtx
= real_decl_rtl
;
3581 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3582 For all seen types, gimplify their sizes. */
3585 gimplify_parm_type (tree
*tp
, int *walk_subtrees
, void *data
)
3592 if (POINTER_TYPE_P (t
))
3594 else if (TYPE_SIZE (t
) && !TREE_CONSTANT (TYPE_SIZE (t
))
3595 && !TYPE_SIZES_GIMPLIFIED (t
))
3597 gimplify_type_sizes (t
, (gimple_seq
*) data
);
3605 /* Gimplify the parameter list for current_function_decl. This involves
3606 evaluating SAVE_EXPRs of variable sized parameters and generating code
3607 to implement callee-copies reference parameters. Returns a sequence of
3608 statements to add to the beginning of the function. */
3611 gimplify_parameters (void)
3613 struct assign_parm_data_all all
;
3615 gimple_seq stmts
= NULL
;
3619 assign_parms_initialize_all (&all
);
3620 fnargs
= assign_parms_augmented_arg_list (&all
);
3622 FOR_EACH_VEC_ELT (fnargs
, i
, parm
)
3624 struct assign_parm_data_one data
;
3626 /* Extract the type of PARM; adjust it according to ABI. */
3627 assign_parm_find_data_types (&all
, parm
, &data
);
3629 /* Early out for errors and void parameters. */
3630 if (data
.passed_mode
== VOIDmode
|| DECL_SIZE (parm
) == NULL
)
3633 /* Update info on where next arg arrives in registers. */
3634 targetm
.calls
.function_arg_advance (all
.args_so_far
, data
.promoted_mode
,
3635 data
.passed_type
, data
.named_arg
);
3637 /* ??? Once upon a time variable_size stuffed parameter list
3638 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3639 turned out to be less than manageable in the gimple world.
3640 Now we have to hunt them down ourselves. */
3641 walk_tree_without_duplicates (&data
.passed_type
,
3642 gimplify_parm_type
, &stmts
);
3644 if (TREE_CODE (DECL_SIZE_UNIT (parm
)) != INTEGER_CST
)
3646 gimplify_one_sizepos (&DECL_SIZE (parm
), &stmts
);
3647 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm
), &stmts
);
3650 if (data
.passed_pointer
)
3652 tree type
= TREE_TYPE (data
.passed_type
);
3653 if (reference_callee_copied (&all
.args_so_far_v
, TYPE_MODE (type
),
3654 type
, data
.named_arg
))
3658 /* For constant-sized objects, this is trivial; for
3659 variable-sized objects, we have to play games. */
3660 if (TREE_CODE (DECL_SIZE_UNIT (parm
)) == INTEGER_CST
3661 && !(flag_stack_check
== GENERIC_STACK_CHECK
3662 && compare_tree_int (DECL_SIZE_UNIT (parm
),
3663 STACK_CHECK_MAX_VAR_SIZE
) > 0))
3665 local
= create_tmp_var (type
, get_name (parm
));
3666 DECL_IGNORED_P (local
) = 0;
3667 /* If PARM was addressable, move that flag over
3668 to the local copy, as its address will be taken,
3669 not the PARMs. Keep the parms address taken
3670 as we'll query that flag during gimplification. */
3671 if (TREE_ADDRESSABLE (parm
))
3672 TREE_ADDRESSABLE (local
) = 1;
3673 else if (TREE_CODE (type
) == COMPLEX_TYPE
3674 || TREE_CODE (type
) == VECTOR_TYPE
)
3675 DECL_GIMPLE_REG_P (local
) = 1;
3679 tree ptr_type
, addr
;
3681 ptr_type
= build_pointer_type (type
);
3682 addr
= create_tmp_reg (ptr_type
, get_name (parm
));
3683 DECL_IGNORED_P (addr
) = 0;
3684 local
= build_fold_indirect_ref (addr
);
3686 t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
3687 t
= build_call_expr (t
, 2, DECL_SIZE_UNIT (parm
),
3688 size_int (DECL_ALIGN (parm
)));
3690 /* The call has been built for a variable-sized object. */
3691 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
3692 t
= fold_convert (ptr_type
, t
);
3693 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
3694 gimplify_and_add (t
, &stmts
);
3697 gimplify_assign (local
, parm
, &stmts
);
3699 SET_DECL_VALUE_EXPR (parm
, local
);
3700 DECL_HAS_VALUE_EXPR_P (parm
) = 1;
3710 /* Compute the size and offset from the start of the stacked arguments for a
3711 parm passed in mode PASSED_MODE and with type TYPE.
3713 INITIAL_OFFSET_PTR points to the current offset into the stacked
3716 The starting offset and size for this parm are returned in
3717 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3718 nonzero, the offset is that of stack slot, which is returned in
3719 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3720 padding required from the initial offset ptr to the stack slot.
3722 IN_REGS is nonzero if the argument will be passed in registers. It will
3723 never be set if REG_PARM_STACK_SPACE is not defined.
3725 FNDECL is the function in which the argument was defined.
3727 There are two types of rounding that are done. The first, controlled by
3728 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3729 argument list to be aligned to the specific boundary (in bits). This
3730 rounding affects the initial and starting offsets, but not the argument
3733 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3734 optionally rounds the size of the parm to PARM_BOUNDARY. The
3735 initial offset is not affected by this rounding, while the size always
3736 is and the starting offset may be. */
3738 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3739 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3740 callers pass in the total size of args so far as
3741 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3744 locate_and_pad_parm (enum machine_mode passed_mode
, tree type
, int in_regs
,
3745 int partial
, tree fndecl ATTRIBUTE_UNUSED
,
3746 struct args_size
*initial_offset_ptr
,
3747 struct locate_and_pad_arg_data
*locate
)
3750 enum direction where_pad
;
3751 unsigned int boundary
, round_boundary
;
3752 int reg_parm_stack_space
= 0;
3753 int part_size_in_regs
;
3755 #ifdef REG_PARM_STACK_SPACE
3756 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
3758 /* If we have found a stack parm before we reach the end of the
3759 area reserved for registers, skip that area. */
3762 if (reg_parm_stack_space
> 0)
3764 if (initial_offset_ptr
->var
)
3766 initial_offset_ptr
->var
3767 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
3768 ssize_int (reg_parm_stack_space
));
3769 initial_offset_ptr
->constant
= 0;
3771 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
3772 initial_offset_ptr
->constant
= reg_parm_stack_space
;
3775 #endif /* REG_PARM_STACK_SPACE */
3777 part_size_in_regs
= (reg_parm_stack_space
== 0 ? partial
: 0);
3780 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
3781 where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
3782 boundary
= targetm
.calls
.function_arg_boundary (passed_mode
, type
);
3783 round_boundary
= targetm
.calls
.function_arg_round_boundary (passed_mode
,
3785 locate
->where_pad
= where_pad
;
3787 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3788 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
3789 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
3791 locate
->boundary
= boundary
;
3793 if (SUPPORTS_STACK_ALIGNMENT
)
3795 /* stack_alignment_estimated can't change after stack has been
3797 if (crtl
->stack_alignment_estimated
< boundary
)
3799 if (!crtl
->stack_realign_processed
)
3800 crtl
->stack_alignment_estimated
= boundary
;
3803 /* If stack is realigned and stack alignment value
3804 hasn't been finalized, it is OK not to increase
3805 stack_alignment_estimated. The bigger alignment
3806 requirement is recorded in stack_alignment_needed
3808 gcc_assert (!crtl
->stack_realign_finalized
3809 && crtl
->stack_realign_needed
);
3814 /* Remember if the outgoing parameter requires extra alignment on the
3815 calling function side. */
3816 if (crtl
->stack_alignment_needed
< boundary
)
3817 crtl
->stack_alignment_needed
= boundary
;
3818 if (crtl
->preferred_stack_boundary
< boundary
)
3819 crtl
->preferred_stack_boundary
= boundary
;
3821 #ifdef ARGS_GROW_DOWNWARD
3822 locate
->slot_offset
.constant
= -initial_offset_ptr
->constant
;
3823 if (initial_offset_ptr
->var
)
3824 locate
->slot_offset
.var
= size_binop (MINUS_EXPR
, ssize_int (0),
3825 initial_offset_ptr
->var
);
3829 if (where_pad
!= none
3830 && (!host_integerp (sizetree
, 1)
3831 || (tree_low_cst (sizetree
, 1) * BITS_PER_UNIT
) % round_boundary
))
3832 s2
= round_up (s2
, round_boundary
/ BITS_PER_UNIT
);
3833 SUB_PARM_SIZE (locate
->slot_offset
, s2
);
3836 locate
->slot_offset
.constant
+= part_size_in_regs
;
3839 #ifdef REG_PARM_STACK_SPACE
3840 || REG_PARM_STACK_SPACE (fndecl
) > 0
3843 pad_to_arg_alignment (&locate
->slot_offset
, boundary
,
3844 &locate
->alignment_pad
);
3846 locate
->size
.constant
= (-initial_offset_ptr
->constant
3847 - locate
->slot_offset
.constant
);
3848 if (initial_offset_ptr
->var
)
3849 locate
->size
.var
= size_binop (MINUS_EXPR
,
3850 size_binop (MINUS_EXPR
,
3852 initial_offset_ptr
->var
),
3853 locate
->slot_offset
.var
);
3855 /* Pad_below needs the pre-rounded size to know how much to pad
3857 locate
->offset
= locate
->slot_offset
;
3858 if (where_pad
== downward
)
3859 pad_below (&locate
->offset
, passed_mode
, sizetree
);
3861 #else /* !ARGS_GROW_DOWNWARD */
3863 #ifdef REG_PARM_STACK_SPACE
3864 || REG_PARM_STACK_SPACE (fndecl
) > 0
3867 pad_to_arg_alignment (initial_offset_ptr
, boundary
,
3868 &locate
->alignment_pad
);
3869 locate
->slot_offset
= *initial_offset_ptr
;
3871 #ifdef PUSH_ROUNDING
3872 if (passed_mode
!= BLKmode
)
3873 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
3876 /* Pad_below needs the pre-rounded size to know how much to pad below
3877 so this must be done before rounding up. */
3878 locate
->offset
= locate
->slot_offset
;
3879 if (where_pad
== downward
)
3880 pad_below (&locate
->offset
, passed_mode
, sizetree
);
3882 if (where_pad
!= none
3883 && (!host_integerp (sizetree
, 1)
3884 || (tree_low_cst (sizetree
, 1) * BITS_PER_UNIT
) % round_boundary
))
3885 sizetree
= round_up (sizetree
, round_boundary
/ BITS_PER_UNIT
);
3887 ADD_PARM_SIZE (locate
->size
, sizetree
);
3889 locate
->size
.constant
-= part_size_in_regs
;
3890 #endif /* ARGS_GROW_DOWNWARD */
3892 #ifdef FUNCTION_ARG_OFFSET
3893 locate
->offset
.constant
+= FUNCTION_ARG_OFFSET (passed_mode
, type
);
3897 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3898 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3901 pad_to_arg_alignment (struct args_size
*offset_ptr
, int boundary
,
3902 struct args_size
*alignment_pad
)
3904 tree save_var
= NULL_TREE
;
3905 HOST_WIDE_INT save_constant
= 0;
3906 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
3907 HOST_WIDE_INT sp_offset
= STACK_POINTER_OFFSET
;
3909 #ifdef SPARC_STACK_BOUNDARY_HACK
3910 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3911 the real alignment of %sp. However, when it does this, the
3912 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3913 if (SPARC_STACK_BOUNDARY_HACK
)
3917 if (boundary
> PARM_BOUNDARY
)
3919 save_var
= offset_ptr
->var
;
3920 save_constant
= offset_ptr
->constant
;
3923 alignment_pad
->var
= NULL_TREE
;
3924 alignment_pad
->constant
= 0;
3926 if (boundary
> BITS_PER_UNIT
)
3928 if (offset_ptr
->var
)
3930 tree sp_offset_tree
= ssize_int (sp_offset
);
3931 tree offset
= size_binop (PLUS_EXPR
,
3932 ARGS_SIZE_TREE (*offset_ptr
),
3934 #ifdef ARGS_GROW_DOWNWARD
3935 tree rounded
= round_down (offset
, boundary
/ BITS_PER_UNIT
);
3937 tree rounded
= round_up (offset
, boundary
/ BITS_PER_UNIT
);
3940 offset_ptr
->var
= size_binop (MINUS_EXPR
, rounded
, sp_offset_tree
);
3941 /* ARGS_SIZE_TREE includes constant term. */
3942 offset_ptr
->constant
= 0;
3943 if (boundary
> PARM_BOUNDARY
)
3944 alignment_pad
->var
= size_binop (MINUS_EXPR
, offset_ptr
->var
,
3949 offset_ptr
->constant
= -sp_offset
+
3950 #ifdef ARGS_GROW_DOWNWARD
3951 FLOOR_ROUND (offset_ptr
->constant
+ sp_offset
, boundary_in_bytes
);
3953 CEIL_ROUND (offset_ptr
->constant
+ sp_offset
, boundary_in_bytes
);
3955 if (boundary
> PARM_BOUNDARY
)
3956 alignment_pad
->constant
= offset_ptr
->constant
- save_constant
;
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode,
           tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
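
/* For example (purely illustrative numbers): with PARM_BOUNDARY of 32 bits,
   a 16-bit scalar argument that is padded downward occupies a 4-byte slot,
   so the code above advances offset_ptr->constant by 4 - 2 = 2 bytes of
   padding below the value.  */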
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
           || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
          && REGNO_REG_SET_P (setjmp_crosses, regno));
}
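
/* For example, a local variable kept in a callee-saved register and
   assigned again after `setjmp' returns may revert to its earlier value
   when `longjmp' restores the registers saved in the jump buffer; the
   test above flags exactly such multiply-set (or argument) registers so
   the warnings below can point at them.  */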
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow_analysis before register
   allocation since that will clobber the pseudo-regs to hard
   regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    if (TREE_CODE (decl) == VAR_DECL
        && DECL_RTL_SET_P (decl)
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
               " %<longjmp%> or %<vfork%>", decl);

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
               "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
               decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
          || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
              != prev_super))
        BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
          && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
        {
          BLOCK_FRAGMENT_CHAIN (block)
            = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
          if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
            BLOCK_SAME_RANGE (block) = 0;
        }
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  stack_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
4165 reorder_blocks_1 (rtx insns
, tree current_block
, vec
<tree
> *p_block_stack
)
4168 tree prev_beg
= NULL_TREE
, prev_end
= NULL_TREE
;
4170 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4174 if (NOTE_KIND (insn
) == NOTE_INSN_BLOCK_BEG
)
4176 tree block
= NOTE_BLOCK (insn
);
4179 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block
) == NULL_TREE
);
4183 BLOCK_SAME_RANGE (prev_end
) = 0;
4184 prev_end
= NULL_TREE
;
4186 /* If we have seen this block before, that means it now
4187 spans multiple address regions. Create a new fragment. */
4188 if (TREE_ASM_WRITTEN (block
))
4190 tree new_block
= copy_node (block
);
4192 BLOCK_SAME_RANGE (new_block
) = 0;
4193 BLOCK_FRAGMENT_ORIGIN (new_block
) = origin
;
4194 BLOCK_FRAGMENT_CHAIN (new_block
)
4195 = BLOCK_FRAGMENT_CHAIN (origin
);
4196 BLOCK_FRAGMENT_CHAIN (origin
) = new_block
;
4198 NOTE_BLOCK (insn
) = new_block
;
4202 if (prev_beg
== current_block
&& prev_beg
)
4203 BLOCK_SAME_RANGE (block
) = 1;
4207 BLOCK_SUBBLOCKS (block
) = 0;
4208 TREE_ASM_WRITTEN (block
) = 1;
4209 /* When there's only one block for the entire function,
4210 current_block == block and we mustn't do this, it
4211 will cause infinite recursion. */
4212 if (block
!= current_block
)
4215 if (block
!= origin
)
4216 gcc_assert (BLOCK_SUPERCONTEXT (origin
) == current_block
4217 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4220 if (p_block_stack
->is_empty ())
4221 super
= current_block
;
4224 super
= p_block_stack
->last ();
4225 gcc_assert (super
== current_block
4226 || BLOCK_FRAGMENT_ORIGIN (super
)
4229 BLOCK_SUPERCONTEXT (block
) = super
;
4230 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
4231 BLOCK_SUBBLOCKS (current_block
) = block
;
4232 current_block
= origin
;
4234 p_block_stack
->safe_push (block
);
4236 else if (NOTE_KIND (insn
) == NOTE_INSN_BLOCK_END
)
4238 NOTE_BLOCK (insn
) = p_block_stack
->pop ();
4239 current_block
= BLOCK_SUPERCONTEXT (current_block
);
4240 if (BLOCK_FRAGMENT_ORIGIN (current_block
))
4241 current_block
= BLOCK_FRAGMENT_ORIGIN (current_block
);
4242 prev_beg
= NULL_TREE
;
4243 prev_end
= BLOCK_SAME_RANGE (NOTE_BLOCK (insn
))
4244 ? NOTE_BLOCK (insn
) : NULL_TREE
;
4249 prev_beg
= NULL_TREE
;
4251 BLOCK_SAME_RANGE (prev_end
) = 0;
4252 prev_end
= NULL_TREE
;
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
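
/* A minimal usage sketch (mirroring number_blocks below): the caller owns
   the returned array and must free it:

     int n_blocks;
     tree *blocks = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     ...walk blocks[1] .. blocks[n_blocks - 1], skipping the top-level block...
     free (blocks);  */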
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
4399 /* Keep track of whether we're in a dummy function context. If we are,
4400 we don't want to invoke the set_current_function hook, because we'll
4401 get into trouble if the hook calls target_reinit () recursively or
4402 when the initial initialization is not yet complete. */
4404 static bool in_dummy_function
;
4406 /* Invoke the target hook when setting cfun. Update the optimization options
4407 if the function uses different options than the default. */
4410 invoke_set_current_function_hook (tree fndecl
)
4412 if (!in_dummy_function
)
4414 tree opts
= ((fndecl
)
4415 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
)
4416 : optimization_default_node
);
4419 opts
= optimization_default_node
;
4421 /* Change optimization options if needed. */
4422 if (optimization_current_node
!= opts
)
4424 optimization_current_node
= opts
;
4425 cl_optimization_restore (&global_options
, TREE_OPTIMIZATION (opts
));
4428 targetm
.set_current_function (fndecl
);
4429 this_fn_optabs
= this_target_optabs
;
4431 if (opts
!= optimization_default_node
)
4433 init_tree_optimization_optabs (opts
);
4434 if (TREE_OPTIMIZATION_OPTABS (opts
))
4435 this_fn_optabs
= (struct target_optabs
*)
4436 TREE_OPTIMIZATION_OPTABS (opts
);
4441 /* cfun should never be set directly; use this function. */
4444 set_cfun (struct function
*new_cfun
)
4446 if (cfun
!= new_cfun
)
4449 invoke_set_current_function_hook (new_cfun
? new_cfun
->decl
: NULL_TREE
);
4453 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4455 static vec
<function_p
> cfun_stack
;
4457 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4458 current_function_decl accordingly. */
4461 push_cfun (struct function
*new_cfun
)
4463 gcc_assert ((!cfun
&& !current_function_decl
)
4464 || (cfun
&& current_function_decl
== cfun
->decl
));
4465 cfun_stack
.safe_push (cfun
);
4466 current_function_decl
= new_cfun
? new_cfun
->decl
: NULL_TREE
;
4467 set_cfun (new_cfun
);
4470 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4475 struct function
*new_cfun
= cfun_stack
.pop ();
4476 /* When in_dummy_function, we do have a cfun but current_function_decl is
4477 NULL. We also allow pushing NULL cfun and subsequently changing
4478 current_function_decl to something else and have both restored by
4480 gcc_checking_assert (in_dummy_function
4482 || current_function_decl
== cfun
->decl
);
4483 set_cfun (new_cfun
);
4484 current_function_decl
= new_cfun
? new_cfun
->decl
: NULL_TREE
;
4487 /* Return value of funcdef and increase it. */
4489 get_next_funcdef_no (void)
4491 return funcdef_no
++;
4494 /* Return value of funcdef. */
4496 get_last_funcdef_no (void)
4501 /* Allocate a function structure for FNDECL and set its contents
4502 to the defaults. Set cfun to the newly-allocated object.
4503 Some of the helper functions invoked during initialization assume
4504 that cfun has already been set. Therefore, assign the new object
4505 directly into cfun and invoke the back end hook explicitly at the
4506 very end, rather than initializing a temporary and calling set_cfun
4509 ABSTRACT_P is true if this is a function that will never be seen by
4510 the middle-end. Such functions are front-end concepts (like C++
4511 function templates) that do not correspond directly to functions
4512 placed in object files. */
4515 allocate_struct_function (tree fndecl
, bool abstract_p
)
4517 tree fntype
= fndecl
? TREE_TYPE (fndecl
) : NULL_TREE
;
4519 cfun
= ggc_alloc_cleared_function ();
4521 init_eh_for_function ();
4523 if (init_machine_status
)
4524 cfun
->machine
= (*init_machine_status
) ();
4526 #ifdef OVERRIDE_ABI_FORMAT
4527 OVERRIDE_ABI_FORMAT (fndecl
);
4530 if (fndecl
!= NULL_TREE
)
4532 DECL_STRUCT_FUNCTION (fndecl
) = cfun
;
4533 cfun
->decl
= fndecl
;
4534 current_function_funcdef_no
= get_next_funcdef_no ();
4537 invoke_set_current_function_hook (fndecl
);
4539 if (fndecl
!= NULL_TREE
)
4541 tree result
= DECL_RESULT (fndecl
);
4542 if (!abstract_p
&& aggregate_value_p (result
, fndecl
))
4544 #ifdef PCC_STATIC_STRUCT_RETURN
4545 cfun
->returns_pcc_struct
= 1;
4547 cfun
->returns_struct
= 1;
4550 cfun
->stdarg
= stdarg_p (fntype
);
4552 /* Assume all registers in stdarg functions need to be saved. */
4553 cfun
->va_list_gpr_size
= VA_LIST_MAX_GPR_SIZE
;
4554 cfun
->va_list_fpr_size
= VA_LIST_MAX_FPR_SIZE
;
4556 /* ??? This could be set on a per-function basis by the front-end
4557 but is this worth the hassle? */
4558 cfun
->can_throw_non_call_exceptions
= flag_non_call_exceptions
;
4562 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4563 instead of just setting it. */
4566 push_struct_function (tree fndecl
)
4568 /* When in_dummy_function we might be in the middle of a pop_cfun and
4569 current_function_decl and cfun may not match. */
4570 gcc_assert (in_dummy_function
4571 || (!cfun
&& !current_function_decl
)
4572 || (cfun
&& current_function_decl
== cfun
->decl
));
4573 cfun_stack
.safe_push (cfun
);
4574 current_function_decl
= fndecl
;
4575 allocate_struct_function (fndecl
, false);
4578 /* Reset crtl and other non-struct-function variables to defaults as
4579 appropriate for emitting rtl at the start of a function. */
4582 prepare_function_start (void)
4584 gcc_assert (!crtl
->emit
.x_last_insn
);
4587 init_varasm_status ();
4589 default_rtl_profile ();
4591 if (flag_stack_usage_info
)
4593 cfun
->su
= ggc_alloc_cleared_stack_usage ();
4594 cfun
->su
->static_stack_size
= -1;
4597 cse_not_expected
= ! optimize
;
4599 /* Caller save not needed yet. */
4600 caller_save_needed
= 0;
4602 /* We haven't done register allocation yet. */
4605 /* Indicate that we have not instantiated virtual registers yet. */
4606 virtuals_instantiated
= 0;
4608 /* Indicate that we want CONCATs now. */
4609 generating_concat_p
= 1;
4611 /* Indicate we have no need of a frame pointer yet. */
4612 frame_pointer_needed
= 0;
4615 /* Initialize the rtl expansion mechanism so that we can do simple things
4616 like generate sequences. This is used to provide a context during global
4617 initialization of some passes. You must call expand_dummy_function_end
4618 to exit this context. */
4621 init_dummy_function_start (void)
4623 gcc_assert (!in_dummy_function
);
4624 in_dummy_function
= true;
4625 push_struct_function (NULL_TREE
);
4626 prepare_function_start ();
4629 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4630 and initialize static variables for generating RTL for the statements
4634 init_function_start (tree subr
)
4636 if (subr
&& DECL_STRUCT_FUNCTION (subr
))
4637 set_cfun (DECL_STRUCT_FUNCTION (subr
));
4639 allocate_struct_function (subr
, false);
4640 prepare_function_start ();
4641 decide_function_section (subr
);
4643 /* Warn if this value is an aggregate type,
4644 regardless of which calling convention we are using for it. */
4645 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
4646 warning (OPT_Waggregate_return
, "function returns an aggregate");
4649 /* Expand code to verify the stack_protect_guard. This is invoked at
4650 the end of a function to be protected. */
4652 #ifndef HAVE_stack_protect_test
4653 # define HAVE_stack_protect_test 0
4654 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4658 stack_protect_epilogue (void)
4660 tree guard_decl
= targetm
.stack_protect_guard ();
4661 rtx label
= gen_label_rtx ();
4664 x
= expand_normal (crtl
->stack_protect_guard
);
4665 y
= expand_normal (guard_decl
);
4667 /* Allow the target to compare Y with X without leaking either into
4669 switch (HAVE_stack_protect_test
!= 0)
4672 tmp
= gen_stack_protect_test (x
, y
, label
);
4681 emit_cmp_and_jump_insns (x
, y
, EQ
, NULL_RTX
, ptr_mode
, 1, label
);
4685 /* The noreturn predictor has been moved to the tree level. The rtl-level
4686 predictors estimate this branch about 20%, which isn't enough to get
4687 things moved out of line. Since this is the only extant case of adding
a noreturn function at the rtl level, it doesn't seem worth doing aught
except adding the prediction by hand.  */
4690 tmp
= get_last_insn ();
4692 predict_insn_def (tmp
, PRED_NORETURN
, TAKEN
);
4694 expand_call (targetm
.stack_protect_fail (), NULL_RTX
, /*ignore=*/true);
4699 /* Start the RTL for a new function, and set variables used for
4701 SUBR is the FUNCTION_DECL node.
4702 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4703 the function's parameters, which must be run at any return statement. */
4706 expand_function_start (tree subr
)
4708 /* Make sure volatile mem refs aren't considered
4709 valid operands of arithmetic insns. */
4710 init_recog_no_volatile ();
4714 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr
));
4717 = (stack_limit_rtx
!= NULL_RTX
&& ! DECL_NO_LIMIT_STACK (subr
));
4719 /* Make the label for return statements to jump to. Do not special
4720 case machines with special return instructions -- they will be
4721 handled later during jump, ifcvt, or epilogue creation. */
4722 return_label
= gen_label_rtx ();
4724 /* Initialize rtx used to return the value. */
4725 /* Do this before assign_parms so that we copy the struct value address
4726 before any library calls that assign parms might generate. */
4728 /* Decide whether to return the value in memory or in a register. */
4729 if (aggregate_value_p (DECL_RESULT (subr
), subr
))
4731 /* Returning something that won't go in a register. */
4732 rtx value_address
= 0;
4734 #ifdef PCC_STATIC_STRUCT_RETURN
4735 if (cfun
->returns_pcc_struct
)
4737 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
4738 value_address
= assemble_static_space (size
);
4743 rtx sv
= targetm
.calls
.struct_value_rtx (TREE_TYPE (subr
), 2);
4744 /* Expect to be passed the address of a place to store the value.
4745 If it is passed as an argument, assign_parms will take care of
4749 value_address
= gen_reg_rtx (Pmode
);
4750 emit_move_insn (value_address
, sv
);
4755 rtx x
= value_address
;
4756 if (!DECL_BY_REFERENCE (DECL_RESULT (subr
)))
4758 x
= gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr
)), x
);
4759 set_mem_attributes (x
, DECL_RESULT (subr
), 1);
4761 SET_DECL_RTL (DECL_RESULT (subr
), x
);
4764 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
4765 /* If return mode is void, this decl rtl should not be used. */
4766 SET_DECL_RTL (DECL_RESULT (subr
), NULL_RTX
);
4769 /* Compute the return values into a pseudo reg, which we will copy
4770 into the true return register after the cleanups are done. */
4771 tree return_type
= TREE_TYPE (DECL_RESULT (subr
));
4772 if (TYPE_MODE (return_type
) != BLKmode
4773 && targetm
.calls
.return_in_msb (return_type
))
4774 /* expand_function_end will insert the appropriate padding in
4775 this case. Use the return value's natural (unpadded) mode
4776 within the function proper. */
4777 SET_DECL_RTL (DECL_RESULT (subr
),
4778 gen_reg_rtx (TYPE_MODE (return_type
)));
4781 /* In order to figure out what mode to use for the pseudo, we
4782 figure out what the mode of the eventual return register will
4783 actually be, and use that. */
4784 rtx hard_reg
= hard_function_value (return_type
, subr
, 0, 1);
4786 /* Structures that are returned in registers are not
4787 aggregate_value_p, so we may see a PARALLEL or a REG. */
4788 if (REG_P (hard_reg
))
4789 SET_DECL_RTL (DECL_RESULT (subr
),
4790 gen_reg_rtx (GET_MODE (hard_reg
)));
4793 gcc_assert (GET_CODE (hard_reg
) == PARALLEL
);
4794 SET_DECL_RTL (DECL_RESULT (subr
), gen_group_rtx (hard_reg
));
4798 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4799 result to the real return register(s). */
4800 DECL_REGISTER (DECL_RESULT (subr
)) = 1;
4803 /* Initialize rtx for parameters and local variables.
4804 In some cases this requires emitting insns. */
4805 assign_parms (subr
);
4807 /* If function gets a static chain arg, store it. */
4808 if (cfun
->static_chain_decl
)
4810 tree parm
= cfun
->static_chain_decl
;
4811 rtx local
, chain
, insn
;
4813 local
= gen_reg_rtx (Pmode
);
4814 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
4816 set_decl_incoming_rtl (parm
, chain
, false);
4817 SET_DECL_RTL (parm
, local
);
4818 mark_reg_pointer (local
, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
))));
4820 insn
= emit_move_insn (local
, chain
);
4822 /* Mark the register as eliminable, similar to parameters. */
4824 && reg_mentioned_p (arg_pointer_rtx
, XEXP (chain
, 0)))
4825 set_dst_reg_note (insn
, REG_EQUIV
, chain
, local
);
4828 /* If the function receives a non-local goto, then store the
4829 bits we need to restore the frame pointer. */
4830 if (cfun
->nonlocal_goto_save_area
)
4835 tree var
= TREE_OPERAND (cfun
->nonlocal_goto_save_area
, 0);
4836 gcc_assert (DECL_RTL_SET_P (var
));
4838 t_save
= build4 (ARRAY_REF
,
4839 TREE_TYPE (TREE_TYPE (cfun
->nonlocal_goto_save_area
)),
4840 cfun
->nonlocal_goto_save_area
,
4841 integer_zero_node
, NULL_TREE
, NULL_TREE
);
4842 r_save
= expand_expr (t_save
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4843 gcc_assert (GET_MODE (r_save
) == Pmode
);
4845 emit_move_insn (r_save
, targetm
.builtin_setjmp_frame_value ());
4846 update_nonlocal_goto_save_area ();
4849 /* The following was moved from init_function_start.
4850 The move is supposed to make sdb output more accurate. */
4851 /* Indicate the beginning of the function body,
4852 as opposed to parm setup. */
4853 emit_note (NOTE_INSN_FUNCTION_BEG
);
4855 gcc_assert (NOTE_P (get_last_insn ()));
4857 parm_birth_insn
= get_last_insn ();
4862 PROFILE_HOOK (current_function_funcdef_no
);
4866 /* If we are doing generic stack checking, the probe should go here. */
4867 if (flag_stack_check
== GENERIC_STACK_CHECK
)
4868 stack_check_probe_note
= emit_note (NOTE_INSN_DELETED
);
4871 /* Undo the effects of init_dummy_function_start. */
4873 expand_dummy_function_end (void)
4875 gcc_assert (in_dummy_function
);
4877 /* End any sequences that failed to be closed due to syntax errors. */
4878 while (in_sequence_p ())
4881 /* Outside function body, can't compute type's actual size
4882 until next function's body starts. */
4884 free_after_parsing (cfun
);
4885 free_after_compilation (cfun
);
4887 in_dummy_function
= false;
4890 /* Call DOIT for each hard register used as a return value from
4891 the current function. */
4894 diddle_return_value (void (*doit
) (rtx
, void *), void *arg
)
4896 rtx outgoing
= crtl
->return_rtx
;
4901 if (REG_P (outgoing
))
4902 (*doit
) (outgoing
, arg
);
4903 else if (GET_CODE (outgoing
) == PARALLEL
)
4907 for (i
= 0; i
< XVECLEN (outgoing
, 0); i
++)
4909 rtx x
= XEXP (XVECEXP (outgoing
, 0, i
), 0);
4911 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4918 do_clobber_return_reg (rtx reg
, void *arg ATTRIBUTE_UNUSED
)
4924 clobber_return_register (void)
4926 diddle_return_value (do_clobber_return_reg
, NULL
);
4928 /* In case we do use pseudo to return value, clobber it too. */
4929 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl
)))
4931 tree decl_result
= DECL_RESULT (current_function_decl
);
4932 rtx decl_rtl
= DECL_RTL (decl_result
);
4933 if (REG_P (decl_rtl
) && REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
)
4935 do_clobber_return_reg (decl_rtl
, NULL
);
4941 do_use_return_reg (rtx reg
, void *arg ATTRIBUTE_UNUSED
)
4947 use_return_register (void)
4949 diddle_return_value (do_use_return_reg
, NULL
);
4952 /* Possibly warn about unused parameters. */
4954 do_warn_unused_parameter (tree fn
)
4958 for (decl
= DECL_ARGUMENTS (fn
);
4959 decl
; decl
= DECL_CHAIN (decl
))
4960 if (!TREE_USED (decl
) && TREE_CODE (decl
) == PARM_DECL
4961 && DECL_NAME (decl
) && !DECL_ARTIFICIAL (decl
)
4962 && !TREE_NO_WARNING (decl
))
4963 warning (OPT_Wunused_parameter
, "unused parameter %q+D", decl
);
4966 /* Set the location of the insn chain starting at INSN to LOC. */
4969 set_insn_locations (rtx insn
, int loc
)
4971 while (insn
!= NULL_RTX
)
4974 INSN_LOCATION (insn
) = loc
;
4975 insn
= NEXT_INSN (insn
);
4979 /* Generate RTL for the end of the current function. */
4982 expand_function_end (void)
4986 /* If arg_pointer_save_area was referenced only from a nested
4987 function, we will not have initialized it yet. Do that now. */
4988 if (arg_pointer_save_area
&& ! crtl
->arg_pointer_save_area_init
)
4989 get_arg_pointer_save_area ();
4991 /* If we are doing generic stack checking and this function makes calls,
4992 do a stack probe at the start of the function to ensure we have enough
4993 space for another stack frame. */
4994 if (flag_stack_check
== GENERIC_STACK_CHECK
)
4998 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5001 rtx max_frame_size
= GEN_INT (STACK_CHECK_MAX_FRAME_SIZE
);
5003 if (STACK_CHECK_MOVING_SP
)
5004 anti_adjust_stack_and_probe (max_frame_size
, true);
5006 probe_stack_range (STACK_OLD_CHECK_PROTECT
, max_frame_size
);
5009 set_insn_locations (seq
, prologue_location
);
5010 emit_insn_before (seq
, stack_check_probe_note
);
5015 /* End any sequences that failed to be closed due to syntax errors. */
5016 while (in_sequence_p ())
5019 clear_pending_stack_adjust ();
5020 do_pending_stack_adjust ();
5022 /* Output a linenumber for the end of the function.
5023 SDB depends on this. */
5024 set_curr_insn_location (input_location
);
5026 /* Before the return label (if any), clobber the return
5027 registers so that they are not propagated live to the rest of
5028 the function. This can only happen with functions that drop
5029 through; if there had been a return statement, there would
5030 have either been a return rtx, or a jump to the return label.
5032 We delay actual code generation after the current_function_value_rtx
5034 clobber_after
= get_last_insn ();
5036 /* Output the label for the actual return from the function. */
5037 emit_label (return_label
);
5039 if (targetm_common
.except_unwind_info (&global_options
) == UI_SJLJ
)
5041 /* Let except.c know where it should emit the call to unregister
5042 the function context for sjlj exceptions. */
5043 if (flag_exceptions
)
5044 sjlj_emit_function_exit_after (get_last_insn ());
5048 /* We want to ensure that instructions that may trap are not
5049 moved into the epilogue by scheduling, because we don't
5050 always emit unwind information for the epilogue. */
5051 if (cfun
->can_throw_non_call_exceptions
)
5052 emit_insn (gen_blockage ());
5055 /* If this is an implementation of throw, do what's necessary to
5056 communicate between __builtin_eh_return and the epilogue. */
5057 expand_eh_return ();
5059 /* If scalar return value was computed in a pseudo-reg, or was a named
5060 return value that got dumped to the stack, copy that to the hard
5062 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl
)))
5064 tree decl_result
= DECL_RESULT (current_function_decl
);
5065 rtx decl_rtl
= DECL_RTL (decl_result
);
5067 if (REG_P (decl_rtl
)
5068 ? REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
5069 : DECL_REGISTER (decl_result
))
5071 rtx real_decl_rtl
= crtl
->return_rtx
;
5073 /* This should be set in assign_parms. */
5074 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl
));
5076 /* If this is a BLKmode structure being returned in registers,
5077 then use the mode computed in expand_return. Note that if
5078 decl_rtl is memory, then its mode may have been changed,
5079 but that crtl->return_rtx has not. */
5080 if (GET_MODE (real_decl_rtl
) == BLKmode
)
5081 PUT_MODE (real_decl_rtl
, GET_MODE (decl_rtl
));
5083 /* If a non-BLKmode return value should be padded at the least
5084 significant end of the register, shift it left by the appropriate
5085 amount. BLKmode results are handled using the group load/store
5087 if (TYPE_MODE (TREE_TYPE (decl_result
)) != BLKmode
5088 && REG_P (real_decl_rtl
)
5089 && targetm
.calls
.return_in_msb (TREE_TYPE (decl_result
)))
5091 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl
),
5092 REGNO (real_decl_rtl
)),
5094 shift_return_value (GET_MODE (decl_rtl
), true, real_decl_rtl
);
5096 /* If a named return value dumped decl_return to memory, then
5097 we may need to re-do the PROMOTE_MODE signed/unsigned
5099 else if (GET_MODE (real_decl_rtl
) != GET_MODE (decl_rtl
))
5101 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (decl_result
));
5102 promote_function_mode (TREE_TYPE (decl_result
),
5103 GET_MODE (decl_rtl
), &unsignedp
,
5104 TREE_TYPE (current_function_decl
), 1);
5106 convert_move (real_decl_rtl
, decl_rtl
, unsignedp
);
5108 else if (GET_CODE (real_decl_rtl
) == PARALLEL
)
5110 /* If expand_function_start has created a PARALLEL for decl_rtl,
5111 move the result to the real return registers. Otherwise, do
5112 a group load from decl_rtl for a named return. */
5113 if (GET_CODE (decl_rtl
) == PARALLEL
)
5114 emit_group_move (real_decl_rtl
, decl_rtl
);
5116 emit_group_load (real_decl_rtl
, decl_rtl
,
5117 TREE_TYPE (decl_result
),
5118 int_size_in_bytes (TREE_TYPE (decl_result
)));
5120 /* In the case of complex integer modes smaller than a word, we'll
5121 need to generate some non-trivial bitfield insertions. Do that
5122 on a pseudo and not the hard register. */
5123 else if (GET_CODE (decl_rtl
) == CONCAT
5124 && GET_MODE_CLASS (GET_MODE (decl_rtl
)) == MODE_COMPLEX_INT
5125 && GET_MODE_BITSIZE (GET_MODE (decl_rtl
)) <= BITS_PER_WORD
)
5127 int old_generating_concat_p
;
5130 old_generating_concat_p
= generating_concat_p
;
5131 generating_concat_p
= 0;
5132 tmp
= gen_reg_rtx (GET_MODE (decl_rtl
));
5133 generating_concat_p
= old_generating_concat_p
;
5135 emit_move_insn (tmp
, decl_rtl
);
5136 emit_move_insn (real_decl_rtl
, tmp
);
5139 emit_move_insn (real_decl_rtl
, decl_rtl
);
5143 /* If returning a structure, arrange to return the address of the value
5144 in a place where debuggers expect to find it.
5146 If returning a structure PCC style,
5147 the caller also depends on this value.
5148 And cfun->returns_pcc_struct is not necessarily set. */
5149 if (cfun
->returns_struct
5150 || cfun
->returns_pcc_struct
)
5152 rtx value_address
= DECL_RTL (DECL_RESULT (current_function_decl
));
5153 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
5156 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
5157 type
= TREE_TYPE (type
);
5159 value_address
= XEXP (value_address
, 0);
5161 outgoing
= targetm
.calls
.function_value (build_pointer_type (type
),
5162 current_function_decl
, true);
5164 /* Mark this as a function return value so integrate will delete the
5165 assignment and USE below when inlining this function. */
5166 REG_FUNCTION_VALUE_P (outgoing
) = 1;
5168 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5169 value_address
= convert_memory_address (GET_MODE (outgoing
),
5172 emit_move_insn (outgoing
, value_address
);
5174 /* Show return register used to hold result (in this case the address
5176 crtl
->return_rtx
= outgoing
;
5179 /* Emit the actual code to clobber return register. */
5184 clobber_return_register ();
5188 emit_insn_after (seq
, clobber_after
);
5191 /* Output the label for the naked return from the function. */
5192 if (naked_return_label
)
5193 emit_label (naked_return_label
);
5195 /* @@@ This is a kludge. We want to ensure that instructions that
5196 may trap are not moved into the epilogue by scheduling, because
5197 we don't always emit unwind information for the epilogue. */
5198 if (cfun
->can_throw_non_call_exceptions
5199 && targetm_common
.except_unwind_info (&global_options
) != UI_SJLJ
)
5200 emit_insn (gen_blockage ());
5202 /* If stack protection is enabled for this function, check the guard. */
5203 if (crtl
->stack_protect_guard
)
5204 stack_protect_epilogue ();
5206 /* If we had calls to alloca, and this machine needs
5207 an accurate stack pointer to exit the function,
5208 insert some code to save and restore the stack pointer. */
5209 if (! EXIT_IGNORE_STACK
5210 && cfun
->calls_alloca
)
5215 emit_stack_save (SAVE_FUNCTION
, &tem
);
5218 emit_insn_before (seq
, parm_birth_insn
);
5220 emit_stack_restore (SAVE_FUNCTION
, tem
);
5223 /* ??? This should no longer be necessary since stupid is no longer with
5224 us, but there are some parts of the compiler (eg reload_combine, and
5225 sh mach_dep_reorg) that still try and compute their own lifetime info
5226 instead of using the general framework. */
5227 use_return_register ();
5231 get_arg_pointer_save_area (void)
5233 rtx ret
= arg_pointer_save_area
;
5237 ret
= assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
5238 arg_pointer_save_area
= ret
;
5241 if (! crtl
->arg_pointer_save_area_init
)
5245 /* Save the arg pointer at the beginning of the function. The
5246 generated stack slot may not be a valid memory address, so we
5247 have to check it and fix it if necessary. */
5249 emit_move_insn (validize_mem (ret
),
5250 crtl
->args
.internal_arg_pointer
);
5254 push_topmost_sequence ();
5255 emit_insn_after (seq
, entry_of_function ());
5256 pop_topmost_sequence ();
5258 crtl
->arg_pointer_save_area_init
= true;
5264 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5265 for the first time. */
5268 record_insns (rtx insns
, rtx end
, htab_t
*hashp
)
5271 htab_t hash
= *hashp
;
5275 = htab_create_ggc (17, htab_hash_pointer
, htab_eq_pointer
, NULL
);
5277 for (tmp
= insns
; tmp
!= end
; tmp
= NEXT_INSN (tmp
))
5279 void **slot
= htab_find_slot (hash
, tmp
, INSERT
);
5280 gcc_assert (*slot
== NULL
);
5285 /* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
5286 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5287 insn, then record COPY as well. */
5290 maybe_copy_prologue_epilogue_insn (rtx insn
, rtx copy
)
5295 hash
= epilogue_insn_hash
;
5296 if (!hash
|| !htab_find (hash
, insn
))
5298 hash
= prologue_insn_hash
;
5299 if (!hash
|| !htab_find (hash
, insn
))
5303 slot
= htab_find_slot (hash
, copy
, INSERT
);
5304 gcc_assert (*slot
== NULL
);
5308 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5309 we can be running after reorg, SEQUENCE rtl is possible. */
5312 contains (const_rtx insn
, htab_t hash
)
5317 if (NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
5320 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
5321 if (htab_find (hash
, XVECEXP (PATTERN (insn
), 0, i
)))
5326 return htab_find (hash
, insn
) != NULL
;
5330 prologue_epilogue_contains (const_rtx insn
)
5332 if (contains (insn
, prologue_insn_hash
))
5334 if (contains (insn
, epilogue_insn_hash
))
5339 #ifdef HAVE_simple_return
5341 /* Return true if INSN requires the stack frame to be set up.
5342 PROLOGUE_USED contains the hard registers used in the function
5343 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5344 prologue to set up for the function. */
5346 requires_stack_frame_p (rtx insn
, HARD_REG_SET prologue_used
,
5347 HARD_REG_SET set_up_by_prologue
)
5350 HARD_REG_SET hardregs
;
5354 return !SIBLING_CALL_P (insn
);
5356 /* We need a frame to get the unique CFA expected by the unwinder. */
5357 if (cfun
->can_throw_non_call_exceptions
&& can_throw_internal (insn
))
5360 CLEAR_HARD_REG_SET (hardregs
);
5361 for (df_rec
= DF_INSN_DEFS (insn
); *df_rec
; df_rec
++)
5363 rtx dreg
= DF_REF_REG (*df_rec
);
5368 add_to_hard_reg_set (&hardregs
, GET_MODE (dreg
),
5371 if (hard_reg_set_intersect_p (hardregs
, prologue_used
))
5373 AND_COMPL_HARD_REG_SET (hardregs
, call_used_reg_set
);
5374 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
5375 if (TEST_HARD_REG_BIT (hardregs
, regno
)
5376 && df_regs_ever_live_p (regno
))
5379 for (df_rec
= DF_INSN_USES (insn
); *df_rec
; df_rec
++)
5381 rtx reg
= DF_REF_REG (*df_rec
);
5386 add_to_hard_reg_set (&hardregs
, GET_MODE (reg
),
5389 if (hard_reg_set_intersect_p (hardregs
, set_up_by_prologue
))
5395 /* See whether BB has a single successor that uses [REGNO, END_REGNO),
5396 and if BB is its only predecessor. Return that block if so,
5397 otherwise return null. */
5400 next_block_for_reg (basic_block bb
, int regno
, int end_regno
)
5408 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5410 live
= df_get_live_in (e
->dest
);
5411 for (i
= regno
; i
< end_regno
; i
++)
5412 if (REGNO_REG_SET_P (live
, i
))
5414 if (live_edge
&& live_edge
!= e
)
5420 /* We can sometimes encounter dead code. Don't try to move it
5421 into the exit block. */
5422 if (!live_edge
|| live_edge
->dest
== EXIT_BLOCK_PTR
)
5425 /* Reject targets of abnormal edges. This is needed for correctness
5426 on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
5427 exception edges even though it is generally treated as call-saved
5428 for the majority of the compilation. Moving across abnormal edges
5429 isn't going to be interesting for shrink-wrap usage anyway. */
5430 if (live_edge
->flags
& EDGE_ABNORMAL
)
5433 if (EDGE_COUNT (live_edge
->dest
->preds
) > 1)
5436 return live_edge
->dest
;
5439 /* Try to move INSN from BB to a successor. Return true on success.
5440 USES and DEFS are the set of registers that are used and defined
5441 after INSN in BB. */
5444 move_insn_for_shrink_wrap (basic_block bb
, rtx insn
,
5445 const HARD_REG_SET uses
,
5446 const HARD_REG_SET defs
)
5449 bitmap live_out
, live_in
, bb_uses
, bb_defs
;
5450 unsigned int i
, dregno
, end_dregno
, sregno
, end_sregno
;
5451 basic_block next_block
;
5453 /* Look for a simple register copy. */
5454 set
= single_set (insn
);
5457 src
= SET_SRC (set
);
5458 dest
= SET_DEST (set
);
5459 if (!REG_P (dest
) || !REG_P (src
))
5462 /* Make sure that the source register isn't defined later in BB. */
5463 sregno
= REGNO (src
);
5464 end_sregno
= END_REGNO (src
);
5465 if (overlaps_hard_reg_set_p (defs
, GET_MODE (src
), sregno
))
5468 /* Make sure that the destination register isn't referenced later in BB. */
5469 dregno
= REGNO (dest
);
5470 end_dregno
= END_REGNO (dest
);
5471 if (overlaps_hard_reg_set_p (uses
, GET_MODE (dest
), dregno
)
5472 || overlaps_hard_reg_set_p (defs
, GET_MODE (dest
), dregno
))
5475 /* See whether there is a successor block to which we could move INSN. */
5476 next_block
= next_block_for_reg (bb
, dregno
, end_dregno
);
5480 /* At this point we are committed to moving INSN, but let's try to
5481 move it as far as we can. */
5484 live_out
= df_get_live_out (bb
);
5485 live_in
= df_get_live_in (next_block
);
5488 /* Check whether BB uses DEST or clobbers DEST. We need to add
5489 INSN to BB if so. Either way, DEST is no longer live on entry,
5490 except for any part that overlaps SRC (next loop). */
5491 bb_uses
= &DF_LR_BB_INFO (bb
)->use
;
5492 bb_defs
= &DF_LR_BB_INFO (bb
)->def
;
5495 for (i
= dregno
; i
< end_dregno
; i
++)
5497 if (REGNO_REG_SET_P (bb_uses
, i
) || REGNO_REG_SET_P (bb_defs
, i
)
5498 || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb
)->gen
, i
))
5500 CLEAR_REGNO_REG_SET (live_out
, i
);
5501 CLEAR_REGNO_REG_SET (live_in
, i
);
5504 /* Check whether BB clobbers SRC. We need to add INSN to BB if so.
5505 Either way, SRC is now live on entry. */
5506 for (i
= sregno
; i
< end_sregno
; i
++)
5508 if (REGNO_REG_SET_P (bb_defs
, i
)
5509 || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb
)->gen
, i
))
5511 SET_REGNO_REG_SET (live_out
, i
);
5512 SET_REGNO_REG_SET (live_in
, i
);
5517 /* DF_LR_BB_INFO (bb)->def does not comprise the DF_REF_PARTIAL and
5518 DF_REF_CONDITIONAL defs. So if DF_LIVE doesn't exist, i.e.
5519 at -O1, just give up searching NEXT_BLOCK. */
5521 for (i
= dregno
; i
< end_dregno
; i
++)
5523 CLEAR_REGNO_REG_SET (live_out
, i
);
5524 CLEAR_REGNO_REG_SET (live_in
, i
);
5527 for (i
= sregno
; i
< end_sregno
; i
++)
5529 SET_REGNO_REG_SET (live_out
, i
);
5530 SET_REGNO_REG_SET (live_in
, i
);
5534 /* If we don't need to add the move to BB, look for a single
5537 next_block
= next_block_for_reg (next_block
, dregno
, end_dregno
);
5541 /* BB now defines DEST. It only uses the parts of DEST that overlap SRC
5543 for (i
= dregno
; i
< end_dregno
; i
++)
5545 CLEAR_REGNO_REG_SET (bb_uses
, i
);
5546 SET_REGNO_REG_SET (bb_defs
, i
);
5549 /* BB now uses SRC. */
5550 for (i
= sregno
; i
< end_sregno
; i
++)
5551 SET_REGNO_REG_SET (bb_uses
, i
);
5553 emit_insn_after (PATTERN (insn
), bb_note (bb
));
5558 /* Look for register copies in the first block of the function, and move
5559 them down into successor blocks if the register is used only on one
5560 path. This exposes more opportunities for shrink-wrapping. These
5561 kinds of sets often occur when incoming argument registers are moved
5562 to call-saved registers because their values are live across one or
5563 more calls during the function. */
5566 prepare_shrink_wrap (basic_block entry_block
)
5569 HARD_REG_SET uses
, defs
;
5572 CLEAR_HARD_REG_SET (uses
);
5573 CLEAR_HARD_REG_SET (defs
);
5574 FOR_BB_INSNS_REVERSE_SAFE (entry_block
, insn
, curr
)
5575 if (NONDEBUG_INSN_P (insn
)
5576 && !move_insn_for_shrink_wrap (entry_block
, insn
, uses
, defs
))
5578 /* Add all defined registers to DEFs. */
5579 for (ref
= DF_INSN_DEFS (insn
); *ref
; ref
++)
5581 x
= DF_REF_REG (*ref
);
5582 if (REG_P (x
) && HARD_REGISTER_P (x
))
5583 SET_HARD_REG_BIT (defs
, REGNO (x
));
5586 /* Add all used registers to USESs. */
5587 for (ref
= DF_INSN_USES (insn
); *ref
; ref
++)
5589 x
= DF_REF_REG (*ref
);
5590 if (REG_P (x
) && HARD_REGISTER_P (x
))
5591 SET_HARD_REG_BIT (uses
, REGNO (x
));
5599 /* Insert use of return register before the end of BB. */
5602 emit_use_return_register_into_block (basic_block bb
)
5606 use_return_register ();
5611 if (reg_mentioned_p (cc0_rtx
, PATTERN (insn
)))
5612 insn
= prev_cc0_setter (insn
);
5614 emit_insn_before (seq
, insn
);
5618 /* Create a return pattern, either simple_return or return, depending on
5622 gen_return_pattern (bool simple_p
)
5624 #ifdef HAVE_simple_return
5625 return simple_p
? gen_simple_return () : gen_return ();
5627 gcc_assert (!simple_p
);
5628 return gen_return ();
5632 /* Insert an appropriate return pattern at the end of block BB. This
5633 also means updating block_for_insn appropriately. SIMPLE_P is
5634 the same as in gen_return_pattern and passed to it. */
5637 emit_return_into_block (bool simple_p
, basic_block bb
)
5640 jump
= emit_jump_insn_after (gen_return_pattern (simple_p
), BB_END (bb
));
5641 pat
= PATTERN (jump
);
5642 if (GET_CODE (pat
) == PARALLEL
)
5643 pat
= XVECEXP (pat
, 0, 0);
5644 gcc_assert (ANY_RETURN_P (pat
));
5645 JUMP_LABEL (jump
) = pat
;
5649 /* Set JUMP_LABEL for a return insn. */
5652 set_return_jump_label (rtx returnjump
)
5654 rtx pat
= PATTERN (returnjump
);
5655 if (GET_CODE (pat
) == PARALLEL
)
5656 pat
= XVECEXP (pat
, 0, 0);
5657 if (ANY_RETURN_P (pat
))
5658 JUMP_LABEL (returnjump
) = pat
;
5660 JUMP_LABEL (returnjump
) = ret_rtx
;
5663 #ifdef HAVE_simple_return
5664 /* Create a copy of BB instructions and insert at BEFORE. Redirect
5665 preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE. */
5667 dup_block_and_redirect (basic_block bb
, basic_block copy_bb
, rtx before
,
5668 bitmap_head
*need_prologue
)
5672 rtx insn
= BB_END (bb
);
5674 /* We know BB has a single successor, so there is no need to copy a
5675 simple jump at the end of BB. */
5676 if (simplejump_p (insn
))
5677 insn
= PREV_INSN (insn
);
5680 duplicate_insn_chain (BB_HEAD (bb
), insn
);
5684 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5685 if (active_insn_p (insn
))
5687 fprintf (dump_file
, "Duplicating bb %d to bb %d, %u active insns.\n",
5688 bb
->index
, copy_bb
->index
, count
);
5690 insn
= get_insns ();
5692 emit_insn_before (insn
, before
);
5694 /* Redirect all the paths that need no prologue into copy_bb. */
5695 for (ei
= ei_start (bb
->preds
); (e
= ei_safe_edge (ei
)); )
5696 if (!bitmap_bit_p (need_prologue
, e
->src
->index
))
5698 int freq
= EDGE_FREQUENCY (e
);
5699 copy_bb
->count
+= e
->count
;
5700 copy_bb
->frequency
+= EDGE_FREQUENCY (e
);
5701 e
->dest
->count
-= e
->count
;
5702 if (e
->dest
->count
< 0)
5704 e
->dest
->frequency
-= freq
;
5705 if (e
->dest
->frequency
< 0)
5706 e
->dest
->frequency
= 0;
5707 redirect_edge_and_branch_force (e
, copy_bb
);
5715 #if defined (HAVE_return) || defined (HAVE_simple_return)
5716 /* Return true if there are any active insns between HEAD and TAIL. */
5718 active_insn_between (rtx head
, rtx tail
)
5722 if (active_insn_p (tail
))
5726 tail
= PREV_INSN (tail
);
5731 /* LAST_BB is a block that exits, and empty of active instructions.
5732 Examine its predecessors for jumps that can be converted to
5733 (conditional) returns. */
5735 convert_jumps_to_returns (basic_block last_bb
, bool simple_p
,
5736 vec
<edge
> unconverted ATTRIBUTE_UNUSED
)
5743 vec
<basic_block
> src_bbs
;
5745 src_bbs
.create (EDGE_COUNT (last_bb
->preds
));
5746 FOR_EACH_EDGE (e
, ei
, last_bb
->preds
)
5747 if (e
->src
!= ENTRY_BLOCK_PTR
)
5748 src_bbs
.quick_push (e
->src
);
5750 label
= BB_HEAD (last_bb
);
5752 FOR_EACH_VEC_ELT (src_bbs
, i
, bb
)
5754 rtx jump
= BB_END (bb
);
5756 if (!JUMP_P (jump
) || JUMP_LABEL (jump
) != label
)
5759 e
= find_edge (bb
, last_bb
);
5761 /* If we have an unconditional jump, we can replace that
5762 with a simple return instruction. */
5763 if (simplejump_p (jump
))
5765 /* The use of the return register might be present in the exit
5766 fallthru block. Either:
5767 - removing the use is safe, and we should remove the use in
5768 the exit fallthru block, or
5769 - removing the use is not safe, and we should add it here.
5770 For now, we conservatively choose the latter. Either of the
5771 2 helps in crossjumping. */
5772 emit_use_return_register_into_block (bb
);
5774 emit_return_into_block (simple_p
, bb
);
5778 /* If we have a conditional jump branching to the last
5779 block, we can try to replace that with a conditional
5780 return instruction. */
5781 else if (condjump_p (jump
))
5786 dest
= simple_return_rtx
;
5789 if (!redirect_jump (jump
, dest
, 0))
5791 #ifdef HAVE_simple_return
5796 "Failed to redirect bb %d branch.\n", bb
->index
);
5797 unconverted
.safe_push (e
);
5803 /* See comment in simplejump_p case above. */
5804 emit_use_return_register_into_block (bb
);
5806 /* If this block has only one successor, it both jumps
5807 and falls through to the fallthru block, so we can't
5809 if (single_succ_p (bb
))
5814 #ifdef HAVE_simple_return
5819 "Failed to redirect bb %d branch.\n", bb
->index
);
5820 unconverted
.safe_push (e
);
5826 /* Fix up the CFG for the successful change we just made. */
5827 redirect_edge_succ (e
, EXIT_BLOCK_PTR
);
5828 e
->flags
&= ~EDGE_CROSSING
;
5834 /* Emit a return insn for the exit fallthru block. */
5836 emit_return_for_exit (edge exit_fallthru_edge
, bool simple_p
)
5838 basic_block last_bb
= exit_fallthru_edge
->src
;
5840 if (JUMP_P (BB_END (last_bb
)))
5842 last_bb
= split_edge (exit_fallthru_edge
);
5843 exit_fallthru_edge
= single_succ_edge (last_bb
);
5845 emit_barrier_after (BB_END (last_bb
));
5846 emit_return_into_block (simple_p
, last_bb
);
5847 exit_fallthru_edge
->flags
&= ~EDGE_FALLTHRU
;
5853 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5854 this into place with notes indicating where the prologue ends and where
5855 the epilogue begins. Update the basic block information when possible.
5857 Notes on epilogue placement:
5858 There are several kinds of edges to the exit block:
5859 * a single fallthru edge from LAST_BB
5860 * possibly, edges from blocks containing sibcalls
5861 * possibly, fake edges from infinite loops
5863 The epilogue is always emitted on the fallthru edge from the last basic
5864 block in the function, LAST_BB, into the exit block.
5866 If LAST_BB is empty except for a label, it is the target of every
5867 other basic block in the function that ends in a return. If a
5868 target has a return or simple_return pattern (possibly with
5869 conditional variants), these basic blocks can be changed so that a
5870 return insn is emitted into them, and their target is adjusted to
5871 the real exit block.
5873 Notes on shrink wrapping: We implement a fairly conservative
5874 version of shrink-wrapping rather than the textbook one. We only
5875 generate a single prologue and a single epilogue. This is
5876 sufficient to catch a number of interesting cases involving early
5879 First, we identify the blocks that require the prologue to occur before
5880 them. These are the ones that modify a call-saved register, or reference
5881 any of the stack or frame pointer registers. To simplify things, we then
5882 mark everything reachable from these blocks as also requiring a prologue.
5883 This takes care of loops automatically, and avoids the need to examine
5884 whether MEMs reference the frame, since it is sufficient to check for
5885 occurrences of the stack or frame pointer.
5887 We then compute the set of blocks for which the need for a prologue
5888 is anticipatable (borrowing terminology from the shrink-wrapping
5889 description in Muchnick's book). These are the blocks which either
5890 require a prologue themselves, or those that have only successors
5891 where the prologue is anticipatable. The prologue needs to be
5892 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5893 is not. For the moment, we ensure that only one such edge exists.
5895 The epilogue is placed as described above, but we make a
5896 distinction between inserting return and simple_return patterns
5897 when modifying other blocks that end in a return. Blocks that end
in a sibcall omit the sibcall_epilogue if the block is not in ANTIC.  */
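
/* As a rough illustration of the scheme described above: in a function of
   the shape "if (!x) return; ...code that needs a stack frame...", only the
   blocks past the early return end up in ANTIC, so the single candidate
   edge is the one entering that region.  The prologue is inserted on that
   edge, the epilogue stays on the exit fallthru of the frame-using path,
   and the early-return path can use a simple_return with neither.  */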
5902 thread_prologue_and_epilogue_insns (void)
5905 #ifdef HAVE_simple_return
5906 vec
<edge
> unconverted_simple_returns
= vNULL
;
5907 bool nonempty_prologue
;
5908 bitmap_head bb_flags
;
5909 unsigned max_grow_size
;
5912 rtx seq ATTRIBUTE_UNUSED
, epilogue_end ATTRIBUTE_UNUSED
;
5913 rtx prologue_seq ATTRIBUTE_UNUSED
, split_prologue_seq ATTRIBUTE_UNUSED
;
5914 edge e
, entry_edge
, orig_entry_edge
, exit_fallthru_edge
;
5919 rtl_profile_for_bb (ENTRY_BLOCK_PTR
);
5923 epilogue_end
= NULL_RTX
;
5924 returnjump
= NULL_RTX
;
5926 /* Can't deal with multiple successors of the entry block at the
5927 moment. Function should always have at least one entry
5929 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR
));
5930 entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR
);
5931 orig_entry_edge
= entry_edge
;
5933 split_prologue_seq
= NULL_RTX
;
5934 if (flag_split_stack
5935 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun
->decl
))
5938 #ifndef HAVE_split_stack_prologue
5941 gcc_assert (HAVE_split_stack_prologue
);
5944 emit_insn (gen_split_stack_prologue ());
5945 split_prologue_seq
= get_insns ();
5948 record_insns (split_prologue_seq
, NULL
, &prologue_insn_hash
);
5949 set_insn_locations (split_prologue_seq
, prologue_location
);
5953 prologue_seq
= NULL_RTX
;
5954 #ifdef HAVE_prologue
5958 seq
= gen_prologue ();
5961 /* Insert an explicit USE for the frame pointer
5962 if the profiling is on and the frame pointer is required. */
5963 if (crtl
->profile
&& frame_pointer_needed
)
5964 emit_use (hard_frame_pointer_rtx
);
5966 /* Retain a map of the prologue insns. */
5967 record_insns (seq
, NULL
, &prologue_insn_hash
);
5968 emit_note (NOTE_INSN_PROLOGUE_END
);
5970 /* Ensure that instructions are not moved into the prologue when
5971 profiling is on. The call to the profiling routine can be
5972 emitted within the live range of a call-clobbered register. */
5973 if (!targetm
.profile_before_prologue () && crtl
->profile
)
5974 emit_insn (gen_blockage ());
5976 prologue_seq
= get_insns ();
5978 set_insn_locations (prologue_seq
, prologue_location
);
5982 #ifdef HAVE_simple_return
5983 bitmap_initialize (&bb_flags
, &bitmap_default_obstack
);
5985 /* Try to perform a kind of shrink-wrapping, making sure the
5986 prologue/epilogue is emitted only around those parts of the
5987 function that require it. */
5989 nonempty_prologue
= false;
5990 for (seq
= prologue_seq
; seq
; seq
= NEXT_INSN (seq
))
5991 if (!NOTE_P (seq
) || NOTE_KIND (seq
) != NOTE_INSN_PROLOGUE_END
)
5993 nonempty_prologue
= true;
5997 if (flag_shrink_wrap
&& HAVE_simple_return
5998 && (targetm
.profile_before_prologue () || !crtl
->profile
)
5999 && nonempty_prologue
&& !crtl
->calls_eh_return
)
6001 HARD_REG_SET prologue_clobbered
, prologue_used
, live_on_edge
;
6002 struct hard_reg_set_container set_up_by_prologue
;
6004 vec
<basic_block
> vec
;
6006 bitmap_head bb_antic_flags
;
6007 bitmap_head bb_on_list
;
6008 bitmap_head bb_tail
;
6011 fprintf (dump_file
, "Attempting shrink-wrapping optimization.\n");
6013 /* Compute the registers set and used in the prologue. */
6014 CLEAR_HARD_REG_SET (prologue_clobbered
);
6015 CLEAR_HARD_REG_SET (prologue_used
);
6016 for (p_insn
= prologue_seq
; p_insn
; p_insn
= NEXT_INSN (p_insn
))
6018 HARD_REG_SET this_used
;
6019 if (!NONDEBUG_INSN_P (p_insn
))
6022 CLEAR_HARD_REG_SET (this_used
);
6023 note_uses (&PATTERN (p_insn
), record_hard_reg_uses
,
6025 AND_COMPL_HARD_REG_SET (this_used
, prologue_clobbered
);
6026 IOR_HARD_REG_SET (prologue_used
, this_used
);
6027 note_stores (PATTERN (p_insn
), record_hard_reg_sets
,
6028 &prologue_clobbered
);
6031 prepare_shrink_wrap (entry_edge
->dest
);
6033 bitmap_initialize (&bb_antic_flags
, &bitmap_default_obstack
);
6034 bitmap_initialize (&bb_on_list
, &bitmap_default_obstack
);
6035 bitmap_initialize (&bb_tail
, &bitmap_default_obstack
);

      /* Find the set of basic blocks that require a stack frame,
         and blocks that are too big to be duplicated.  */

      vec.create (n_basic_blocks);

      CLEAR_HARD_REG_SET (set_up_by_prologue.set);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                           STACK_POINTER_REGNUM);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
      if (frame_pointer_needed)
        add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                             HARD_FRAME_POINTER_REGNUM);
      if (pic_offset_table_rtx)
        add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
                             PIC_OFFSET_TABLE_REGNUM);
      if (crtl->drap_reg)
        add_to_hard_reg_set (&set_up_by_prologue.set,
                             GET_MODE (crtl->drap_reg),
                             REGNO (crtl->drap_reg));
      if (targetm.set_up_by_prologue)
        targetm.set_up_by_prologue (&set_up_by_prologue);

      /* We don't use a different max size depending on
         optimize_bb_for_speed_p because increasing shrink-wrapping
         opportunities by duplicating tail blocks can actually result
         in an overall decrease in code size.  */
      max_grow_size = get_uncond_jump_length ();
      max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);

      FOR_EACH_BB (bb)
        {
          rtx insn;
          unsigned size = 0;

          FOR_BB_INSNS (bb, insn)
            if (NONDEBUG_INSN_P (insn))
              {
                if (requires_stack_frame_p (insn, prologue_used,
                                            set_up_by_prologue.set))
                  {
                    if (bb == entry_edge->dest)
                      goto fail_shrinkwrap;
                    bitmap_set_bit (&bb_flags, bb->index);
                    vec.quick_push (bb);
                    break;
                  }
                else if (size <= max_grow_size)
                  {
                    size += get_attr_min_length (insn);
                    if (size > max_grow_size)
                      bitmap_set_bit (&bb_on_list, bb->index);
                  }
              }
        }
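
      /* To recap the loop above: a block lands in BB_FLAGS when one of its
         insns needs the stack frame set up by the prologue, and in
         BB_ON_LIST when its accumulated minimum length exceeds
         MAX_GROW_SIZE, i.e. it is too big to be duplicated as a tail.  */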

      /* Blocks that really need a prologue, or are too big for tails.  */
      bitmap_ior_into (&bb_on_list, &bb_flags);

      /* For every basic block that needs a prologue, mark all blocks
         reachable from it, so as to ensure they are also seen as
         requiring a prologue.  */
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();

          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (e->dest != EXIT_BLOCK_PTR
                && bitmap_set_bit (&bb_flags, e->dest->index))
              vec.quick_push (e->dest);
        }

      /* Find the set of basic blocks that need no prologue, have a
         single successor, can be duplicated, meet a max size
         requirement, and go to the exit via like blocks.  */
      vec.quick_push (EXIT_BLOCK_PTR);
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();

          FOR_EACH_EDGE (e, ei, tmp_bb->preds)
            if (single_succ_p (e->src)
                && !bitmap_bit_p (&bb_on_list, e->src->index)
                && can_duplicate_block_p (e->src))
              {
                edge pe;
                edge_iterator pei;

                /* If there is a predecessor of e->src which doesn't
                   need a prologue and the edge is complex, we might
                   not be able to redirect the branch to a copy of
                   e->src.  */
                FOR_EACH_EDGE (pe, pei, e->src->preds)
                  if ((pe->flags & EDGE_COMPLEX) != 0
                      && !bitmap_bit_p (&bb_flags, pe->src->index))
                    break;
                if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
                  vec.quick_push (e->src);
              }
        }

      /* Now walk backwards from every block that is marked as needing
         a prologue to compute the bb_antic_flags bitmap.  Exclude
         tail blocks; they can be duplicated to be used on paths not
         needing a prologue.  */
      bitmap_clear (&bb_on_list);
      bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
      FOR_EACH_BB (bb)
        {
          if (!bitmap_bit_p (&bb_antic_flags, bb->index))
            continue;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                && bitmap_set_bit (&bb_on_list, e->src->index))
              vec.quick_push (e->src);
        }
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();
          bool all_set = true;

          bitmap_clear_bit (&bb_on_list, tmp_bb->index);
          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
              {
                all_set = false;
                break;
              }

          if (all_set)
            {
              bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
              FOR_EACH_EDGE (e, ei, tmp_bb->preds)
                if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                    && bitmap_set_bit (&bb_on_list, e->src->index))
                  vec.quick_push (e->src);
            }
        }
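
      /* In other words, bb_antic_flags converges to a backward fixed point:
         a block gets marked once every one of its successors is marked, so
         a marked block is one from which every path reaches a block that
         needs the prologue.  The candidate edge searched for below must
         cross from an unmarked block into this marked region.  */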

      /* Find exactly one edge that leads to a block in ANTIC from
         a block that isn't.  */
      if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
        FOR_EACH_BB (bb)
          {
            if (!bitmap_bit_p (&bb_antic_flags, bb->index))
              continue;
            FOR_EACH_EDGE (e, ei, bb->preds)
              if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
                {
                  if (entry_edge != orig_entry_edge)
                    {
                      entry_edge = orig_entry_edge;
                      if (dump_file)
                        fprintf (dump_file, "More than one candidate edge.\n");
                      goto fail_shrinkwrap;
                    }
                  if (dump_file)
                    fprintf (dump_file, "Found candidate edge for "
                             "shrink-wrapping, %d->%d.\n", e->src->index,
                             e->dest->index);
                  entry_edge = e;
                }
          }

      if (entry_edge != orig_entry_edge)
        {
          /* Test whether the prologue is known to clobber any register
             (other than FP or SP) that is live on the edge.  */
          CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
          REG_SET_TO_HARD_REG_SET (live_on_edge,
                                   df_get_live_in (entry_edge->dest));
          if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
            {
              entry_edge = orig_entry_edge;
              if (dump_file)
                fprintf (dump_file,
                         "Shrink-wrapping aborted due to clobber.\n");
            }
        }

      if (entry_edge != orig_entry_edge)
        {
          crtl->shrink_wrapped = true;
          if (dump_file)
            fprintf (dump_file, "Performing shrink-wrapping.\n");

          /* Find tail blocks reachable from both blocks needing a
             prologue and blocks not needing a prologue.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB (bb)
              {
                bool some_pro, some_no_pro;
                if (!bitmap_bit_p (&bb_tail, bb->index))
                  continue;
                some_pro = some_no_pro = false;
                FOR_EACH_EDGE (e, ei, bb->preds)
                  {
                    if (bitmap_bit_p (&bb_flags, e->src->index))
                      some_pro = true;
                    else
                      some_no_pro = true;
                  }
                if (some_pro && some_no_pro)
                  vec.quick_push (bb);
                else
                  bitmap_clear_bit (&bb_tail, bb->index);
              }

          /* Find the head of each tail.  */
          while (!vec.is_empty ())
            {
              basic_block tbb = vec.pop ();

              if (!bitmap_bit_p (&bb_tail, tbb->index))
                continue;

              while (single_succ_p (tbb))
                {
                  tbb = single_succ (tbb);
                  bitmap_clear_bit (&bb_tail, tbb->index);
                }
            }

          /* Now duplicate the tails.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB_REVERSE (bb)
              {
                basic_block copy_bb, tbb;
                rtx insert_point;
                int eflags;

                if (!bitmap_clear_bit (&bb_tail, bb->index))
                  continue;

                /* Create a copy of BB, instructions and all, for
                   use on paths that don't need a prologue.
                   Ideal placement of the copy is on a fall-thru edge
                   or after a block that would jump to the copy.  */
                FOR_EACH_EDGE (e, ei, bb->preds)
                  if (!bitmap_bit_p (&bb_flags, e->src->index)
                      && single_succ_p (e->src))
                    break;
                if (e)
                  {
                    /* Make sure we insert after any barriers.  */
                    rtx end = get_last_bb_insn (e->src);
                    copy_bb = create_basic_block (NEXT_INSN (end),
                                                  NULL_RTX, e->src);
                    BB_COPY_PARTITION (copy_bb, e->src);
                  }
                else
                  {
                    /* Otherwise put the copy at the end of the function.  */
                    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
                                                  EXIT_BLOCK_PTR->prev_bb);
                    BB_COPY_PARTITION (copy_bb, bb);
                  }

                insert_point = emit_note_after (NOTE_INSN_DELETED,
                                                BB_END (copy_bb));
                emit_barrier_after (BB_END (copy_bb));

                tbb = bb;
                while (1)
                  {
                    dup_block_and_redirect (tbb, copy_bb, insert_point,
                                            &bb_flags);
                    tbb = single_succ (tbb);
                    if (tbb == EXIT_BLOCK_PTR)
                      break;
                    e = split_block (copy_bb, PREV_INSN (insert_point));
                    copy_bb = e->dest;
                  }

                /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
                   We have yet to add a simple_return to the tails,
                   as we'd like to first convert_jumps_to_returns in
                   case the block is no longer used after that.  */
                eflags = EDGE_FAKE;
                if (CALL_P (PREV_INSN (insert_point))
                    && SIBLING_CALL_P (PREV_INSN (insert_point)))
                  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
                make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);

                /* verify_flow_info doesn't like a note after a
                   sibling call.  */
                delete_insn (insert_point);
                if (bitmap_empty_p (&bb_tail))
                  break;
              }
        }

    fail_shrinkwrap:
      bitmap_clear (&bb_tail);
      bitmap_clear (&bb_antic_flags);
      bitmap_clear (&bb_on_list);
      vec.release ();
    }
#endif

  if (split_prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
      inserted = true;
    }
  if (prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (prologue_seq, entry_edge);
      inserted = true;
    }
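
  /* Note that when shrink-wrapping succeeded above, ENTRY_EDGE differs from
     ORIG_ENTRY_EDGE, so the prologue proper has just been placed on an edge
     deeper in the CFG, while any split-stack prologue stays on the original
     entry edge.  */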

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);

  /* If we're allowed to generate a simple return instruction, then by
     definition we don't need a full epilogue.  If the last basic
     block before the exit block does not contain active instructions,
     examine its predecessors and try to emit (conditional) return
     instructions.  */
#ifdef HAVE_simple_return
  if (entry_edge != orig_entry_edge)
    {
      if (optimize)
        {
          unsigned i, last;

          /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
             (but won't remove).  Stop at end of current preds.  */
          last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
          for (i = 0; i < last; i++)
            {
              e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
              if (LABEL_P (BB_HEAD (e->src))
                  && !bitmap_bit_p (&bb_flags, e->src->index)
                  && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
                unconverted_simple_returns
                  = convert_jumps_to_returns (e->src, true,
                                              unconverted_simple_returns);
            }
        }

      if (exit_fallthru_edge != NULL
          && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
          && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
        {
          basic_block last_bb;

          last_bb = emit_return_for_exit (exit_fallthru_edge, true);
          returnjump = BB_END (last_bb);
          exit_fallthru_edge = NULL;
        }
    }
#endif
#ifdef HAVE_return
  if (HAVE_return)
    {
      if (exit_fallthru_edge == NULL)
        goto epilogue_done;

      if (optimize)
        {
          basic_block last_bb = exit_fallthru_edge->src;

          if (LABEL_P (BB_HEAD (last_bb))
              && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
            convert_jumps_to_returns (last_bb, false, vNULL);

          if (EDGE_COUNT (last_bb->preds) != 0
              && single_succ_p (last_bb))
            {
              last_bb = emit_return_for_exit (exit_fallthru_edge, false);
              epilogue_end = returnjump = BB_END (last_bb);
#ifdef HAVE_simple_return
              /* Emitting the return may add a basic block.
                 Fix bb_flags for the added block.  */
              if (last_bb != exit_fallthru_edge->src)
                bitmap_set_bit (&bb_flags, last_bb->index);
#endif
              goto epilogue_done;
            }
        }
    }
#endif
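
  /* Any exit edges that could not be converted to simple_return jumps above
     were collected in UNCONVERTED_SIMPLE_RETURNS; they are redirected to a
     dedicated simple_return block further down, once the epilogue itself is
     in place.  */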

  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
        continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
        continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
        continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif

  /* If nothing falls through into the exit block, we don't need an
     epilogue.  */
  if (exit_fallthru_edge == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      if (seq)
        emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locations (seq, epilogue_location);

      seq = get_insns ();
      returnjump = get_last_insn ();
      end_sequence ();

      insert_insn_on_edge (seq, exit_fallthru_edge);
      inserted = true;

      if (JUMP_P (returnjump))
        set_return_jump_label (returnjump);
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }

epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block);
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
         be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          if (((e->flags & EDGE_FALLTHRU) != 0)
              && returnjump_p (BB_END (e->src)))
            e->flags &= ~EDGE_FALLTHRU;
        }
    }

#ifdef HAVE_simple_return
  /* If there were branches to an empty LAST_BB which we tried to
     convert to conditional simple_returns, but couldn't for some
     reason, create a block to hold a simple_return insn and redirect
     those remaining edges.  */
  if (!unconverted_simple_returns.is_empty ())
    {
      basic_block simple_return_block_hot = NULL;
      basic_block simple_return_block_cold = NULL;
      edge pending_edge_hot = NULL;
      edge pending_edge_cold = NULL;
      basic_block exit_pred;
      int i;

      gcc_assert (entry_edge != orig_entry_edge);

      /* See if we can reuse the last insn that was emitted for the
         epilogue.  */
      if (returnjump != NULL_RTX
          && JUMP_LABEL (returnjump) == simple_return_rtx)
        {
          e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            simple_return_block_hot = e->dest;
          else
            simple_return_block_cold = e->dest;
        }

      /* Also check returns we might need to add to tail blocks.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
              pending_edge_hot = e;
            else
              pending_edge_cold = e;
          }

      /* Save a pointer to the exit's predecessor BB for use in
         inserting new BBs at the end of the function.  Do this
         after the call to split_block above which may split
         the original exit pred.  */
      exit_pred = EXIT_BLOCK_PTR->prev_bb;

      FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
        {
          basic_block *pdest_bb;
          edge pending;

          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            {
              pdest_bb = &simple_return_block_hot;
              pending = pending_edge_hot;
            }
          else
            {
              pdest_bb = &simple_return_block_cold;
              pending = pending_edge_cold;
            }

          if (*pdest_bb == NULL && pending != NULL)
            {
              emit_return_into_block (true, pending->src);
              pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
              *pdest_bb = pending->src;
            }
          else if (*pdest_bb == NULL)
            {
              basic_block bb;
              rtx start;

              bb = create_basic_block (NULL, NULL, exit_pred);
              BB_COPY_PARTITION (bb, e->src);
              start = emit_jump_insn_after (gen_simple_return (),
                                            BB_END (bb));
              JUMP_LABEL (start) = simple_return_rtx;
              emit_barrier_after (start);

              *pdest_bb = bb;
              make_edge (bb, EXIT_BLOCK_PTR, 0);
            }
          redirect_edge_and_branch_force (e, *pdest_bb);
        }
      unconverted_simple_returns.release ();
    }

  if (entry_edge != orig_entry_edge)
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            emit_return_into_block (true, e->src);
            e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
          }
    }
#endif

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn)
#ifdef HAVE_simple_return
          || (entry_edge != orig_entry_edge
              && !bitmap_bit_p (&bb_flags, bb->index))
#endif
          )
        {
          ei_next (&ei);
          continue;
        }

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          rtx seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before we
             actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locations (seq, epilogue_location);

          emit_insn_before (seq, insn);
        }
      ei_next (&ei);
    }
#endif
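
  /* Note that a sibcall epilogue is emitted inline, immediately before the
     sibling call insn itself, rather than on an edge to the exit block:
     control leaves the function through the call, so there is no fall-thru
     edge on which to insert it.  */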

#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif

#ifdef HAVE_simple_return
  bitmap_clear (&bb_flags);
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}

/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                {
                  if (NOTE_P (note)
                      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                    break;
                }
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          rtx insn, first = NULL, note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Returns the name of the function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}

/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_P (TREE_TYPE (t)))
      t = TREE_TYPE (t);
    else
      break;
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        {
          /* So this might be a type referenced by a global variable.
             Record that type so that we can later decide to emit its
             debug information.  */
          vec_safe_push (types_used_by_cur_var_decl, t);
        }
    }
}

/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
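
/* Note that types_used_by_vars_do_hash and types_used_by_vars_eq satisfy
   the usual htab contract: entries that compare equal hash identically,
   since both functions only look at the VAR_DECL and TYPE pointers of the
   entry.  */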

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash =
          htab_create_ggc (37, types_used_by_vars_do_hash,
                           types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
                                       hash_types_used_by_vars_entry (&e),
                                       INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc_types_used_by_vars_entry ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}

namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_handle_check_leaf_regs (); }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}

namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_verify_flow, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish
    | TODO_verify_rtl_sharing ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return rest_of_handle_thread_prologue_and_epilogue ();
  }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}

/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     asm ("": "+mr" (inout));

   which is transformed very early to use explicit output and match operands:

     asm ("": "=mr" (inout) : "0" (inout));

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
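
/* Note that the transformation is not a pure renaming:
   match_asm_constraints_1 below also emits a move from the old input
   register to the matched output register just before the asm, so the
   value seen by the matched operand is unchanged.  */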

static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change `in' once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);

      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (!INSN_P (insn))
          continue;

        pat = PATTERN (insn);
        if (GET_CODE (pat) == PARALLEL)
          p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
        else if (GET_CODE (pat) == SET)
          p_sets = &PATTERN (insn), noutputs = 1;
        else
          continue;

        if (GET_CODE (*p_sets) == SET
            && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
          match_asm_constraints_1 (insn, p_sets, noutputs);
      }

  return TODO_df_finish;
}

namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_match_asm_constraints (); }

}; // class pass_match_asm_constraints

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"