/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010  Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "integrate.h"
#include "langhooks.h"
#include "cfglayout.h"
#include "tree-pass.h"
/* So we can assign to cfun in this file.  */
#undef cfun
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value down to the largest multiple of the required alignment
   that is not greater than it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the alignment
   request.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
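/* Worked examples (illustrative only, not part of GCC): with ALIGN == 8,
   FLOOR_ROUND (13, 8) == 8,  FLOOR_ROUND (-13, 8) == -16,
   CEIL_ROUND (13, 8) == 16,  CEIL_ROUND (-13, 8) == -8,
   so both macros behave correctly for negative frame offsets.  */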
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
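/* Illustrative use only (not from GCC itself): a caller that wants the
   frame size rounded up to the preferred stack boundary, as the comment
   above anticipates, could write

     HOST_WIDE_INT sz = CEIL_ROUND (get_frame_size (),
                                    PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);  */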
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;
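  /* Worked numbers (illustrative): with PREFERRED_STACK_BOUNDARY == 128,
     frame_alignment is 16.  If STARTING_FRAME_OFFSET is 8, frame_off is 8
     and frame_phase is 8, so the rounded offsets computed below are
     congruent to 8 modulo 16, matching where locals actually start.  */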
  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.

   We do not round to stack_boundary here.  */
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
                      int align, bool reduce_alignment_ok ATTRIBUTE_UNUSED)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert (reduce_alignment_ok
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      struct frame_space **psp;

      for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
        {
          struct frame_space *space = *psp;
          if (!try_fit_stack_local (space->start, space->length, size,
                                    alignment, &slot_offset))
            continue;
          *psp = space->next;
          if (slot_offset > space->start)
            add_frame_space (space->start, slot_offset);
          if (slot_offset + size < space->start + space->length)
            add_frame_space (slot_offset + size,
                             space->start + space->length);
          goto found_space;
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (slot_offset > frame_offset)
        add_frame_space (frame_offset, slot_offset);
      if (slot_offset + size < old_frame_offset)
        add_frame_space (slot_offset + size, old_frame_offset);
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (slot_offset > old_frame_offset)
        add_frame_space (old_frame_offset, slot_offset);
      if (slot_offset + size < frame_offset)
        add_frame_space (slot_offset + size, frame_offset);
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with last parameter as false.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, false);
}
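/* Illustrative calls (hypothetical sizes, not from GCC):

     rtx a = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
        -- alignment determined by the mode
     rtx b = assign_stack_local (BLKmode, 64, -1);
        -- BIGGEST_ALIGNMENT, size rounded up to a multiple of it
     rtx c = assign_stack_local (BLKmode, 3, -2);
        -- byte alignment only  */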
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  char keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
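/* A sketch of the nesting discipline described above (illustrative,
   not code from GCC itself):

     push_temp_slots ();
     temp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     ... emit code using TEMP ...
     preserve_temp_slots (result);  -- keep the slot holding RESULT alive
     pop_temp_slots ();             -- frees the remaining slots of this level  */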
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}
/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    htab_clear_slot (temp_slot_address_table, slot);
  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
                 remove_unused_temp_slot_addresses_1,
                 NULL);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return 0;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */
rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc_temp_slot ();
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
                                  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
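/* Illustrative use (hypothetical, not from GCC): a memory temporary to
   hold a DImode intermediate result:

     rtx t = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     emit_move_insn (t, src);

   The slot can be recycled by a later call once free_temp_slots runs.  */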
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */
rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     we are optimizing heavily.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
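/* Worked example (illustrative): two free BLKmode slots with
   base_offset/full_size of 0/16 and 16/32 are adjacent, so the first
   absorbs the second and becomes a single 0/48 region that a later,
   larger temporary can reuse.  */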
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        {
          make_slot_available (p);
          some_available = true;
        }
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
                                               temp_slot_address_hash,
                                               temp_slot_address_eq,
                                               NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather is part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (crtl->outgoing_args_size						      \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL)))				      \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)		      \
 + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
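/* Illustrative mapping (offsets are hypothetical): if var_offset is -16,
   instantiate_new_reg maps virtual_stack_vars_rtx to frame_pointer_rtx
   with *poffset = -16, so a use of (plus (reg virtual-stack-vars)
   (const_int 8)) becomes (plus (reg fp) (const_int -8)) once the offset
   is folded in by the callers below.  */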
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
        {
          *loc = plus_constant (new_rtx, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
        {
          new_rtx = plus_constant (new_rtx, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new_rtx, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }
  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
                                             GEN_INT (offset), NULL_RTX,
                                             1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;
        else if (DEBUG_INSN_P (insn))
          for_each_rtx (&INSN_VAR_LOCATION (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
        else
          instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  /* See allocate_dynamic_stack_space for the rationale.  */
#ifdef SETJMP_VIA_SAVE_AREA
  if (flag_stack_usage && cfun->calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      dynamic_offset = (dynamic_offset + align - 1) / align * align;
      current_function_dynamic_stack_size
        += current_function_dynamic_alloc_count * dynamic_offset;
    }
#endif

  return 0;
}
struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */
int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          fntype = (fndecl
                    ? TREE_TYPE (fndecl)
                    : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
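/* Illustrative cases (assuming a typical target, not a guarantee): for

     struct big { int a[32]; };

   targetm.calls.return_in_memory usually answers true, so
   aggregate_value_p returns nonzero and callers arrange for a hidden
   return-slot address; a plain int normally yields 0 here and comes
   back in a register.  */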
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
         types containing methods, otherwise the methods won't be callable
         from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
        return false;
      break;
    default:
      break;
    }

  return true;
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          type = TREE_TYPE (first_field (type));
          mode = TYPE_MODE (type);
        }
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
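/* Illustrative case: for a type whose TYPE_SIZE is not a compile-time
   INTEGER_CST (e.g. a variable-sized record in a language like Ada),
   the second test above fires and the function returns true regardless
   of the target hook -- variable-sized types are always passed by
   reference.  */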
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
			current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split each entry into two
   entries of the component type, updating the vector in place.  */

static void
split_complex_args (VEC(tree, heap) **args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (tree, *args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  VEC_replace (tree, *args, i, p);

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  VEC_safe_insert (tree, heap, *args, ++i, decl);
	}
    }
}
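
/* For example, under the transformation above a single PARM_DECL of type
   _Complex double becomes two DFmode PARM_DECLs: the rewritten original
   for the real part and a synthetic one for the imaginary part.  */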
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static VEC(tree, heap) *
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  VEC(tree, heap) *fnargs = NULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    VEC_safe_push (tree, heap, fnargs, arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      VEC_safe_insert (tree, heap, fnargs, 0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */
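  /* For instance, in int f (int a, int b, ...), A is known non-variadic
     because parameters follow it; whether B, the last named parameter,
     counts as named here depends on the target's strict_argument_naming
     hook.  */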

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (&all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm,
			GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
	    {
	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
						  data->promoted_mode);
	      if (offset)
		set_mem_offset (stack_parm,
				plus_constant (MEM_OFFSET (stack_parm),
					       -offset));
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
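      /* A worked example of the guess above: a constant offset of 20
	 bytes with a 32-bit BOUNDARY gives 20 * 8 | 32 == 160 (binary
	 10100000); 160 & -160 isolates the lowest set bit, 32, so the
	 slot is assumed to be 32-bit aligned.  */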
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (stack_parm), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  enum machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg,
				build_int_cst (NULL_TREE, by),
				NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
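	      /* For example, with SIZE == 3 and 4-byte words the value
		 is shifted left by (4 - 3) * 8 = 8 bits, so the three
		 significant bytes end up left-justified in the word
		 before it is stored.  */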
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
/* A subroutine of assign_parm_setup_reg, called through note_stores.
   This collects sets and clobbers of hard registers in a HARD_REG_SET,
   which is pointed to by DATA.  */

static void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;

  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
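      /* For example, a DImode value living in 32-bit hard register 3
	 occupies registers 3 and 4; the loop below marks both.  */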
      while (nregs-- > 0)
	SET_HARD_REG_BIT (*pset, REGNO (x) + nregs);
    }
}
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (data->entry_parm);

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_data[icode].operand[0].predicate (op0, promoted_nominal_mode)
	  && insn_data[icode].operand[1].predicate (op1, data->passed_mode))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx insn, insns;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
				  data->passed_mode, unsignedp);
	  emit_insn (insn);
	  insns = get_insns ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (PATTERN (insn), record_hard_reg_sets,
			     &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  end_sequence ();

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, put it in one.  */
  if (data->passed_pointer
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
      /* If by-reference argument was promoted, demote it.  */
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
	  || use_register_for_decl (parm)))
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
      mark_user_reg (parmreg);

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, DECL_RTL (parm));
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (parmreg, tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  enum machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else if ((set = single_set (linsn)) != 0
	       && SET_DEST (set) == parmreg)
	set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  int offset = subreg_lowpart_offset (data->nominal_mode,
					      GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (offset && MEM_OFFSET (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    plus_constant (MEM_OFFSET (data->stack_parm),
					   offset));
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      VEC(tree, heap) *fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (VEC_index (tree, fnargs, i));
	  imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  SET_DECL_RTL (parm, tmp);

	  real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
	  imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  VEC(tree, heap) *fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      if (cfun->stdarg && !DECL_CHAIN (parm))
	assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}

      /* Record permanently how this parm was passed.  */
      set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
	assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
	assign_parm_setup_reg (&all, parm, &data);
      else
	assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  VEC_free (tree, heap, fnargs);

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  enum machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
  crtl->args.size = MAX (crtl->args.size,
			 REG_PARM_STACK_SPACE (fndecl));
#endif

  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  crtl->args.arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
				   size_int (-all.stack_args_size.constant)),
		      NULL_RTX, VOIDmode, EXPAND_NORMAL));
#else
  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copied reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  VEC(tree, heap) *fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_reg (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARMs.  Keep the parms address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = built_in_decls[BUILT_IN_ALLOCA];
		  t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
		  /* The call has been built for a variable-sized object.  */
		  ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
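		  /* At this point ADDR holds freshly alloca'd storage
		     and LOCAL is *ADDR, so the assignment emitted below
		     copies the caller's object into callee-owned
		     memory.  */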
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  VEC_free (tree, heap, fnargs);

  return stmts;
}
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
		     int partial, tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;
#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					  initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
	&& (!host_integerp (sizetree, 1)
	    || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
			  &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
			   - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
				   size_binop (MINUS_EXPR,
					       ssize_int (0),
					       initial_offset_ptr->var),
				   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
			  &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
	  tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
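	  /* For example, with sp_offset == 4, a constant offset of 10
	     and an 8-byte boundary: CEIL_ROUND (10 + 4, 8) == 16, so
	     the new constant is -4 + 16 == 12 and sp_offset plus the
	     offset is again a multiple of 8; alignment_pad records the
	     2 bytes of padding added.  */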
	}
    }
}

static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode,
	   tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
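	  /* E.g. a 10-byte BLKmode argument with a 32-bit PARM_BOUNDARY
	     is padded below by round_up (10, 4) - 10 = 2 bytes.  */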
	}
    }
}

/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after flow analysis and before register
   allocation, since register allocation will map the pseudo-regs to
   hard regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
3971 /* Generate warning messages for variables live across setjmp. */
3974 generate_setjmp_warnings (void)
3976 bitmap setjmp_crosses
= regstat_get_setjmp_crosses ();
3978 if (n_basic_blocks
== NUM_FIXED_BLOCKS
3979 || bitmap_empty_p (setjmp_crosses
))
3982 setjmp_vars_warning (setjmp_crosses
, DECL_INITIAL (current_function_decl
));
3983 setjmp_args_warning (setjmp_crosses
);
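
/* An illustrative (uncompiled) sketch of the situation diagnosed above:

     jmp_buf env;
     int f (void)
     {
       int n = 1;            // pseudo-reg, set more than once
       if (setjmp (env))
         return n;           // value unreliable after longjmp
       n = 2;
       g ();                 // may call longjmp (env, 1)
       return 0;
     }

   `n' is live across the setjmp call and is set more than once, so the
   walk above issues "variable 'n' might be clobbered by 'longjmp' or
   'vfork'".  Declaring `n' volatile forces it to memory and silences
   the warning.  */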
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	BLOCK_FRAGMENT_CHAIN (block)
	  = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
      prev = block;
    }
  return prev;
}

/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *block_stack;

  if (block == NULL_TREE)
    return;

  block_stack = VEC_alloc (tree, heap, 10);

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));

  VEC_free (tree, heap, block_stack);
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}

static void
reorder_blocks_1 (rtx insns, tree current_block,
		  VEC(tree,heap) **p_block_stack)
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);

		  BLOCK_SUPERCONTEXT (block) = current_block;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      VEC_safe_push (tree, heap, *p_block_stack, block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	    }
	}
    }
}
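
/* A small illustration of the fragment machinery above (expository
   only): if hot/cold partitioning places the two halves of a lexical
   block in different sections, its scope notes appear twice in the
   insn stream:

     NOTE_INSN_BLOCK_BEG  (block B)   ;; hot section
     ...
     NOTE_INSN_BLOCK_END  (block B)
     ...
     NOTE_INSN_BLOCK_BEG  (block B)   ;; cold section -- second sighting
     ...
     NOTE_INSN_BLOCK_END  (block B)

   The second sighting creates a copy B' with BLOCK_FRAGMENT_ORIGIN (B')
   == B, chained through BLOCK_FRAGMENT_CHAIN, so the debug back ends
   can describe one lexical block covering multiple address ranges.  */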
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}

/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}

/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
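
/* A minimal usage sketch for the helper above (names here are just for
   illustration): callers own the returned array and must free it.

     int n_blocks;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     ... walk vec[0] .. vec[n_blocks - 1] in depth-first preorder ...
     free (vec);

   number_blocks below follows exactly this pattern.  */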
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}

/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
    }
}

/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl
					: NULL_TREE);
    }
}

/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static VEC(function_p,heap) *cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  */

void
push_cfun (struct function *new_cfun)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  set_cfun (new_cfun);
}

/* Pop cfun from the stack.  */

void
pop_cfun (void)
{
  struct function *new_cfun = VEC_pop (function_p, cfun_stack);
  set_cfun (new_cfun);
}
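
/* A minimal usage sketch (illustrative): passes that must temporarily
   operate on another function's state bracket their work with the pair
   above, which keeps cfun, the target hooks, and the per-function
   optimization options consistent:

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... examine or emit into the other function ...
     pop_cfun ();

   Assigning to cfun directly would bypass
   invoke_set_current_function_hook and leave the target in a stale
   state.  */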
/* Return value of funcdef and increase it.  */

int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared_function ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();

      result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  allocate_struct_function (fndecl, false);
}

/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage)
    {
      cfun->su = ggc_alloc_cleared_stack_usage ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}

/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);
  prepare_function_start ();

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}

/* Make sure all values used by the optimization passes have sane defaults.  */
unsigned int
init_function_for_compilation (void)
{
  reg_renumber = 0;
  insn_locators_alloc ();
  return 0;
}

struct rtl_opt_pass pass_init_function =
{
 {
  RTL_PASS,
  "*init_function",                     /* name */
  NULL,                                 /* gate */
  init_function_for_compilation,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}

/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
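
/* For illustration (not part of the pass itself): on targets without a
   stack_protect_set pattern the prologue degenerates to a plain move,
   conceptually

     guard_slot = __stack_chk_guard;

   while the dedicated pattern lets a port load and store the guard
   without leaving a copy of it live in a scratch register, where a
   later spill could expose the guard value.  */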
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
	{
	  emit_insn (tmp);
	  break;
	}
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
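
/* The emitted epilogue check has this shape (expository sketch):

     if (guard_slot == __stack_chk_guard)
       goto label;          // likely path: guard intact
     __stack_chk_fail ();   // noreturn, predicted not taken
   label:
     ... function exit ...
 */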
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
	    {
	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
	      set_mem_attributes (x, DECL_RESULT (subr), 1);
	    }
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain, insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_unique_reg_note (insn, REG_EQUIV, chain);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
	 before the frame variable gets declared.  Help out...  */
      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      if (!DECL_RTL_SET_P (var))
	expand_decl (var);

      t_save = build4 (ARRAY_REF, ptr_type_node,
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the stack checking
     probe should go.  */
  if (flag_stack_check)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */
void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */
void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = DECL_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
	&& !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
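
/* Illustration (not compiled here): given

     int f (int used, int unused) { return used; }

   the loop above warns "unused parameter 'unused'" under
   -Wunused-parameter, but stays quiet for unnamed parameters (no
   DECL_NAME), compiler-generated ones (DECL_ARTIFICIAL), and
   parameters already flagged with TREE_NO_WARNING.  */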
static GTY(()) rtx initial_trampoline;

/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locators (seq, prologue_locator);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  set_curr_insn_source_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
	emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret),
		      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx insns, rtx end, htab_t *hashp)
{
  rtx tmp;
  htab_t hash = *hashp;

  if (hash == NULL)
    *hashp = hash
      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      void **slot = htab_find_slot (hash, tmp, INSERT);
      gcc_assert (*slot == NULL);

      *slot = tmp;
    }
}

/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  htab_t hash;
  void **slot;

  hash = epilogue_insn_hash;
  if (!hash || !htab_find (hash, insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !htab_find (hash, insn))
	return;
    }

  slot = htab_find_slot (hash, copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}

/* Set the locator of the insn chain starting at INSN to LOC.  */
static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
	INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}

/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, htab_t hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int i;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
	  return true;
      return false;
    }

  return htab_find (hash, insn) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
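
/* Usage sketch (illustrative): passes that duplicate insns, e.g. basic
   block duplication or peephole splitting, are expected to call
   maybe_copy_prologue_epilogue_insn (old_insn, new_insn) so that the
   copy keeps its prologue/epilogue identity; later queries such as
   prologue_epilogue_contains (new_insn) then still answer correctly,
   including for insns buried inside delay-slot SEQUENCEs.  */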
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
}
#endif /* HAVE_return */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

static void
thread_prologue_and_epilogue_insns (void)
{
  bool inserted;
  rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
  edge entry_edge ATTRIBUTE_UNUSED;
  edge e;
  edge_iterator ei;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  inserted = false;
  seq = NULL_RTX;
  epilogue_end = NULL_RTX;

  /* Can't deal with multiple successors of the entry block at the
     moment.  Function should always have at least one entry
     point.  */
  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);

  if (flag_split_stack
      && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
	  == NULL))
    {
#ifndef HAVE_split_stack_prologue
      gcc_unreachable ();
#else
      gcc_assert (HAVE_split_stack_prologue);

      start_sequence ();
      emit_insn (gen_split_stack_prologue ());
      seq = get_insns ();
      end_sequence ();

      record_insns (seq, NULL, &prologue_insn_hash);
      set_insn_locators (seq, prologue_locator);

      /* This relies on the fact that committing the edge insertion
	 will look for basic blocks within the inserted instructions,
	 which in turn relies on the fact that we are not in CFG
	 layout mode here.  */
      insert_insn_on_edge (seq, entry_edge);
      inserted = true;
#endif
    }

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
	 if the profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
	emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, NULL, &prologue_insn_hash);
      emit_note (NOTE_INSN_PROLOGUE_END);

      /* Ensure that instructions are not moved into the prologue when
	 profiling is on.  The call to the profiling routine can be
	 emitted within the live range of a call-clobbered register.  */
      if (!targetm.profile_before_prologue () && crtl->profile)
	emit_insn (gen_blockage ());

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      insert_insn_on_edge (seq, entry_edge);
      inserted = true;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR);
#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */

      basic_block last;
      rtx label;

      e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (BB_HEAD (last) == label && LABEL_P (label))
	{
	  edge_iterator ei2;

	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      if (bb == ENTRY_BLOCK_PTR)
		{
		  ei_next (&ei2);
		  continue;
		}

	      jump = BB_END (bb);
	      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
		{
		  ei_next (&ei2);
		  continue;
		}

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  if (! redirect_jump (jump, 0, 0))
		    {
		      ei_next (&ei2);
		      continue;
		    }

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (single_succ_p (bb))
		    {
		      ei_next (&ei2);
		      continue;
		    }
		}
	      else
		{
		  ei_next (&ei2);
		  continue;
		}

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (BB_END (last));
	  emit_return_into_block (last);
	  epilogue_end = BB_END (last);
	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif

  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
	continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
	continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
	continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif

  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = true;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
	if (cur_bb->index >= NUM_FIXED_BLOCKS
	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	  cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }

epilogue_done:
  default_rtl_profile ();

  if (inserted)
    {
      commit_edge_insertions ();

      /* The epilogue insns we inserted may cause the exit edge to no longer
	 be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  if (((e->flags & EDGE_FALLTHRU) != 0)
	      && returnjump_p (BB_END (e->src)))
	    e->flags &= ~EDGE_FALLTHRU;
	}
    }

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn))
	{
	  ei_next (&ei);
	  continue;
	}

      start_sequence ();
      emit_note (NOTE_INSN_EPILOGUE_BEG);
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif

#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
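
/* A note on the mechanism above (expository): prologue and epilogue
   sequences are not emitted directly into a block; they are queued on
   the entry/exit edges with insert_insn_on_edge and materialized by
   commit_edge_insertions, which creates the new basic blocks.  That is
   why the code must not be in CFG layout mode at that point.  */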
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  rtx insn, first = NULL, note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue
	     insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  if (cfun == NULL)
    return "<none>";
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
						 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
	*slot = type;
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	/* So this might be a type referenced by a global variable.
	   Record that type so that we can later decide to emit its debug
	   information.  */
	VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
    }
}
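
/* Usage sketch (illustrative): front ends call used_types_insert for
   each type a function actually uses; the per-function hash built by
   used_types_insert_helper is later consulted by the debug back end
   when pre-marking used types, so that debug info for such types is
   not discarded by the unused-type elimination heuristics.  */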
/* Helper to Hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash =
	  htab_create_ggc (37, types_used_by_vars_do_hash,
			   types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
				       hash_types_used_by_vars_entry (&e),
				       INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc_types_used_by_vars_entry ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  "*leaf_regs",                         /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage)
    output_stack_usage ();

  return 0;
}

struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
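
/* Worked example (illustrative): for

     asm ("" : "=mr" (x_2) : "0" (x_1));

   the input constraint string is "0", so the strtoul call above yields
   match == 0, pairing the input with output operand 0.  The pass then
   emits `x_2 = x_1;' before the asm and rewrites the input to x_2,
   restoring matching operands so reload may leave the value in
   memory.  */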
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}
struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


#include "gt-function.h"