/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "integrate.h"
#include "langhooks.h"
#include "cfglayout.h"
#include "tree-pass.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
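
/* Worked examples (illustrative only, not part of the build), with
   ALIGN == 8:
     CEIL_ROUND (13, 8)   == (13 + 7) & ~7 == 16
     FLOOR_ROUND (13, 8)  == 13 & ~7       == 8
     FLOOR_ROUND (-13, 8) == -13 & ~7      == -16
   Both stay correct for negative values because they avoid division.  */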
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void emit_return_into_block (basic_block);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));

  regno_reg_rtx = NULL;
  insn_locators_free ();
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     type.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with KIND set to ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
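
/* Example (illustrative only): a port needing a word-sized spill slot
   aligned for its mode would typically write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where ALIGN == 0 requests the natural alignment of SImode, per the
   table in the comment before assign_stack_local_1 above.  */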
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
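
/* Illustrative usage (a sketch, not code from this file): RTL expansion
   for a single statement typically brackets its temporaries as

     push_temp_slots ();
     tmp = assign_stack_temp (mode, size, 0);
     ... emit RTL that uses tmp ...
     pop_temp_slots ();

   so slots with keep == 0 are recycled as soon as the level is popped.  */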
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}
/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    htab_clear_slot (temp_slot_address_table, slot);

  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
                 remove_unused_temp_slot_addresses_1,
                 NULL);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */
rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc_temp_slot ();
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }
      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      gcc_checking_assert (!MEM_SCALAR_P (slot) && !MEM_IN_STRUCT_P (slot));
      if (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
        MEM_IN_STRUCT_P (slot) = 1;
      else
        MEM_SCALAR_P (slot) = 1;
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
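
/* Example (illustrative only): a DImode scratch slot for the duration of
   the current statement could be obtained with

     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   KEEP == 0 lets free_temp_slots reclaim it at the end of the statement.  */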
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   arithmetic.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location; if so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and there is a register
     in common between them, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        {
          make_slot_available (p);
          some_available = true;
        }
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
                                               temp_slot_address_hash,
                                               temp_slot_address_eq,
                                               NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
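
/* Illustrative example: once these offsets are computed, a frame access
   such as

     (mem (plus (reg virtual_stack_vars_rtx) (const_int 12)))

   is rewritten by instantiate_new_reg below into

     (mem (plus (reg frame_pointer_rtx) (const_int var_offset + 12)))

   with the sum folded to a single constant.  */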
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
1360 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1361 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1363 #ifndef STACK_DYNAMIC_OFFSET
1365 /* The bottom of the stack points to the actual arguments. If
1366 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1367 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1368 stack space for register parameters is not pushed by the caller, but
1369 rather part of the fixed stack areas and hence not included in
1370 `crtl->outgoing_args_size'. Nevertheless, we must allow
1371 for it when allocating stack dynamic objects. */
1373 #if defined(REG_PARM_STACK_SPACE)
1374 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1375 ((ACCUMULATE_OUTGOING_ARGS \
1376 ? (crtl->outgoing_args_size \
1377 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1378 : REG_PARM_STACK_SPACE (FNDECL))) \
1379 : 0) + (STACK_POINTER_OFFSET))
1381 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1382 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1383 + (STACK_POINTER_OFFSET))
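
/* Worked default (illustrative): on a target with no REG_PARM_STACK_SPACE
   and !ACCUMULATE_OUTGOING_ARGS, STACK_DYNAMIC_OFFSET (fndecl) reduces to
   STACK_POINTER_OFFSET, i.e. dynamic allocations begin immediately past
   the fixed stack-pointer bias.  */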
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
        {
          *loc = plus_constant (new_rtx, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
        {
          new_rtx = plus_constant (new_rtx, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new_rtx, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
                                             GEN_INT (offset), NULL_RTX,
                                             1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((TREE_CODE (t) == VAR_DECL
               || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;
        else if (DEBUG_INSN_P (insn))
          for_each_rtx (&INSN_VAR_LOCATION (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
        else
          instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  /* See allocate_dynamic_stack_space for the rationale.  */
#ifdef SETJMP_VIA_SAVE_AREA
  if (flag_stack_usage && cfun->calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      dynamic_offset = (dynamic_offset + align - 1) / align * align;
      current_function_dynamic_stack_size
        += current_function_dynamic_alloc_count * dynamic_offset;
    }
#endif

  return 0;
}
struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          fntype = (fndecl
                    ? TREE_TYPE (fndecl)
                    : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
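
/* Illustrative example (target-dependent, so only an assumption): for
   `struct S { int a[4]; } f (void);' most targets answer nonzero here,
   so callers of f pass the address of a return slot instead of expecting
   the struct back in registers; the final decision above rests with
   targetm.calls.return_in_memory.  */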
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
         types containing methods, otherwise the methods won't be callable
         from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
        return false;
      break;
    default:
      break;
    }

  return true;
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          type = TREE_TYPE (first_field (type));
          mode = TYPE_MODE (type);
        }
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
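/* Illustrative example (not from the original source): a parameter whose
   type is a variable-sized struct has a TYPE_SIZE that is not an
   INTEGER_CST, so the variable-size test above forces it to be passed by
   invisible reference regardless of what the target hook would decide
   for fixed-size types.  */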
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions
   were needed, else the old list.  */
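/* For example (illustrative, not from the original source): on a target
   whose split_complex_arg hook returns true for _Complex double, a
   PARM_DECL of that type is rewritten in place to a `double' holding the
   real part, and a second artificial `double' PARM_DECL for the
   imaginary part is inserted immediately after it, so the argument
   vector grows by one entry per split argument.  */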
static void
split_complex_args (VEC(tree, heap) **args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (tree, *args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          p = copy_node (p);
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);
          VEC_replace (tree, *args, i, p);

          /* Build a second synthetic decl.  */
          decl = build_decl (EXPR_LOCATION (p),
                             PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);
          VEC_safe_insert (tree, heap, *args, ++i, decl);
        }
    }
}
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static VEC(tree, heap) *
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  VEC(tree, heap) *fnargs = NULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    VEC_safe_push (tree, heap, fnargs, arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
                         PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      VEC_safe_insert (tree, heap, fnargs, 0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
                                         TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
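/* Promotion example (illustrative, not from the original source): for a
   `short' parameter on a target whose promote_function_mode hook widens
   sub-word integers, nominal_mode and passed_mode are both HImode while
   promoted_mode becomes SImode; the extra bits are sign- or zero-filled
   according to TYPE_UNSIGNED of the passed type.  */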
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
                                                    data->promoted_mode,
                                                    data->passed_type,
                                                    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
        {
          rtx tem;
          tem = targetm.calls.function_incoming_arg (&all->args_so_far,
                                                     data->promoted_mode,
                                                     data->passed_type, true);
          in_regs = tem != NULL;
        }
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
                                        data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
                                                 data->promoted_mode,
                                                 data->passed_type,
                                                 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
         register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
        {
          /* Part of this argument is passed in registers and part
             is passed on the stack.  Ask the prologue code to extend
             the stack part so that we can recreate the full value.

             PRETEND_BYTES is the size of the registers we need to store.
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
             stack space that the prologue should allocate.

             Internally, gcc assumes that the argument pointer is aligned
             to STACK_BOUNDARY bits.  This is used both for alignment
             optimizations (see init_emit) and to locate arguments that are
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
             a stack boundary.  */
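          /* Worked example (illustrative, not from the original source):
             with STACK_BOUNDARY == 64 and hence STACK_BYTES == 8, an
             argument whose first 4 bytes arrive in registers gives
             pretend_bytes == 4 and pretend_args_size
             == CEIL_ROUND (4, 8) == 8, which keeps the argument pointer
             aligned to a full stack boundary.  */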
          /* We assume at most one partial arg, and it must be the first
             argument on the stack.  */
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

          pretend_bytes = partial;
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

          /* We want to align relative to the actual stack pointer, so
             don't include this in the stack size until later.  */
          all->extra_pretend_bytes = all->pretend_args_size;
        }
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
                       entry_parm ? data->partial : 0, current_function_decl,
                       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
         while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
          && data->promoted_mode != DECL_MODE (parm))
        {
          set_mem_size (stack_parm,
                        GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
          if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
            {
              int offset = subreg_lowpart_offset (DECL_MODE (parm),
                                                  data->promoted_mode);
              if (offset)
                set_mem_offset (stack_parm,
                                plus_constant (MEM_OFFSET (stack_parm),
                                               -offset));
            }
        }
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
         locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        emit_group_store (validize_mem (stack_parm), entry_parm,
                          data->passed_type,
                          int_size_in_bytes (data->passed_type));
      else
        {
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
          move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
                               data->partial / UNITS_PER_WORD);
        }

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
           && data->nominal_mode != BLKmode
           && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                == data->passed_mode)
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
          {
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
            break;
          }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
                        GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
          || (data->nominal_type
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
           && data->nominal_mode != BLKmode
           && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
           && (flag_stack_protect == 2
               || data->passed_pointer
               || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
                         tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
                                       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
        PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
         So we have to be careful to ensure that we allocate an
         integral number of words.  We do this above when we call
         assign_stack_local if space was not allocated in the argument
         list.  If it was, this will not work if PARM_BOUNDARY is not
         a multiple of BITS_PER_WORD.  It isn't clear how to fix this
         if it becomes a problem.  Exception is when BLKmode arrives
         with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
        ;
      else if (GET_CODE (entry_parm) == PARALLEL)
        ;
      else
        gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        {
          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_group_store (mem, entry_parm, data->passed_type, size);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();
        }

      else if (size == 0)
        ;

      /* If SIZE is that of a mode no bigger than a word, just use
         that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
        {
          enum machine_mode mode
            = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

          if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
              && (size == UNITS_PER_WORD
                  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
              )
            {
              rtx reg;

              /* We are really truncating a word_mode value containing
                 SIZE bytes into a value of mode MODE.  If such an
                 operation requires no actual instructions, we can refer
                 to the value directly in mode MODE, otherwise we must
                 start with the register in word_mode and explicitly
                 convert it.  */
              if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
                reg = gen_rtx_REG (mode, REGNO (entry_parm));
              else
                {
                  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
                  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
                }
              emit_move_insn (change_address (mem, mode, 0), reg);
            }

          /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
             machine must be aligned to the left before storing
             to memory.  Note that the previous test doesn't
             handle all cases (e.g. SIZE == 3).  */
          else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                       == downward)
#else
                   && BYTES_BIG_ENDIAN
#endif
                   )
            {
              rtx tem, x;
              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
              rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

              x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                build_int_cst (NULL_TREE, by),
                                NULL_RTX, 1);
              tem = change_address (mem, word_mode, 0);
              emit_move_insn (tem, x);
            }
          else
            move_block_from_reg (REGNO (entry_parm), mem,
                                 size_stored / UNITS_PER_WORD);
        }
      else
        move_block_from_reg (REGNO (entry_parm), mem,
                             size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn,
                         all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
                       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
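/* Worked example for the big-endian left-justification above
   (illustrative, not from the original source): with UNITS_PER_WORD == 4
   and a 3-byte BLKmode argument arriving right-justified in a register
   on a BYTES_BIG_ENDIAN target, by == (4 - 3) * 8 == 8, so the register
   is shifted left by 8 bits to move the three significant bytes to the
   most significant end before the full word is stored to the slot.  */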
/* A subroutine of assign_parm_setup_reg, called through note_stores.
   This collects sets and clobbers of hard registers in a HARD_REG_SET,
   which is pointed to by DATA.  */

static void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
      while (nregs-- > 0)
        SET_HARD_REG_BIT (*pset, REGNO (x) + nregs);
    }
}
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
                       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode,
                             &unsignedp, TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (data->entry_parm);

  need_conversion = (data->nominal_mode != data->passed_mode
                     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
         mode, by the caller.  We now have to convert it to
         NOMINAL_MODE, if different.  However, PARMREG may be in
         a different mode than NOMINAL_MODE if it is being stored
         promoted.

         If ENTRY_PARM is a hard register, it might be in a register
         not valid for operating in its mode (e.g., an odd-numbered
         register for a DFmode).  In that case, moves are the only
         thing valid, so we can't do a convert from there.  This
         occurs when the calling sequence allows such misaligned
         usages.

         In addition, the conversion may involve a call, which could
         clobber parameters which haven't been copied to pseudo
         registers yet.

         First, we try to emit an insn which performs the necessary
         conversion.  We verify that this insn does not clobber any
         hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
                            unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
          && insn_operand_matches (icode, 0, op0)
          && insn_operand_matches (icode, 1, op1))
        {
          enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
          rtx insn, insns;
          HARD_REG_SET hardregs;

          start_sequence ();
          insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
                                  data->passed_mode, unsignedp);
          emit_insn (insn);
          insns = get_insns ();

          moved = true;
          CLEAR_HARD_REG_SET (hardregs);
          for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                note_stores (PATTERN (insn), record_hard_reg_sets,
                             &hardregs);
              if (!hard_reg_set_empty_p (hardregs))
                moved = false;
            }

          end_sequence ();

          if (moved)
            {
              emit_insn (insns);
              if (equiv_stack_parm != NULL_RTX)
                equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
                                                  equiv_stack_parm);
            }
        }
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
         generated appeared unsafe.  We must first copy the parm to a
         pseudo reg, and save the conversion until after all
         parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn,
                         all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
          && GET_MODE (tempreg) == data->nominal_mode
          && REG_P (SUBREG_REG (tempreg))
          && data->nominal_mode == data->passed_mode
          && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
          && GET_MODE_SIZE (GET_MODE (tempreg))
             < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
        {
          /* The argument is already sign/zero extended, so note it
             into the subreg.  */
          SUBREG_PROMOTED_VAR_P (tempreg) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
        }

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, put it in one.  */
  if (data->passed_pointer
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
      /* If by-reference argument was promoted, demote it.  */
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
          || use_register_for_decl (parm)))
    {
      /* We can't use nominal_mode, because it will have been set to
         Pmode above.  We must use the actual mode of the parm.  */
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
      mark_user_reg (parmreg);

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
        {
          rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
          int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_move_insn (tempreg, DECL_RTL (parm));
          tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
          emit_move_insn (parmreg, tempreg);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();

          did_conversion = true;
        }
      else
        emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
         now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
                          XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
        {
          enum machine_mode submode
            = GET_MODE_INNER (GET_MODE (parmreg));
          int regnor = REGNO (XEXP (parmreg, 0));
          int regnoi = REGNO (XEXP (parmreg, 1));
          rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
          rtx stacki = adjust_address_nv (data->stack_parm, submode,
                                          GET_MODE_SIZE (submode));

          /* Scan backwards for the set of the real and
             imaginary parts.  */
          for (sinsn = linsn; sinsn != 0;
               sinsn = prev_nonnote_insn (sinsn))
            {
              set = single_set (sinsn);
              if (set == 0)
                continue;

              if (SET_DEST (set) == regno_reg_rtx [regnoi])
                set_unique_reg_note (sinsn, REG_EQUIV, stacki);
              else if (SET_DEST (set) == regno_reg_rtx [regnor])
                set_unique_reg_note (sinsn, REG_EQUIV, stackr);
            }
        }
      else if ((set = single_set (linsn)) != 0
               && SET_DEST (set) == parmreg)
        set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
                      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
                         struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence2 (all->first_conversion_insn,
                         all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
                                          TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
        {
          int offset = subreg_lowpart_offset (data->nominal_mode,
                                              GET_MODE (data->stack_parm));
          /* ??? This may need a big-endian conversion on sparc64.  */
          data->stack_parm
            = adjust_address (data->stack_parm, data->nominal_mode, 0);
          if (offset && MEM_OFFSET (data->stack_parm))
            set_mem_offset (data->stack_parm,
                            plus_constant (MEM_OFFSET (data->stack_parm),
                                           offset));
        }
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
        {
          int align = STACK_SLOT_ALIGNMENT (data->passed_type,
                                            GET_MODE (data->entry_parm),
                                            TYPE_ALIGN (data->passed_type));
          data->stack_parm
            = assign_stack_local (GET_MODE (data->entry_parm),
                                  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
                                  align);
          set_mem_attributes (data->stack_parm, parm, 1);
        }

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
        {
          /* Use a block move to handle potentially misaligned entry_parm.  */
          if (!to_conversion)
            push_to_sequence2 (all->first_conversion_insn,
                               all->last_conversion_insn);
          to_conversion = true;

          emit_block_move (dest, src,
                           GEN_INT (int_size_in_bytes (data->passed_type)),
                           BLOCK_OP_NORMAL);
        }
      else
        emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
                              VEC(tree, heap) *fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
        {
          rtx tmp, real, imag;
          enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

          real = DECL_RTL (VEC_index (tree, fnargs, i));
          imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }

          if (TREE_ADDRESSABLE (parm))
            {
              rtx rmem, imem;
              HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
              int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
                                                DECL_MODE (parm),
                                                TYPE_ALIGN (TREE_TYPE (parm)));

              /* split_complex_arg put the real and imag parts in
                 pseudos.  Move them to memory.  */
              tmp = assign_stack_local (DECL_MODE (parm), size, align);
              set_mem_attributes (tmp, parm, 1);
              rmem = adjust_address_nv (tmp, inner, 0);
              imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
              push_to_sequence2 (all->first_conversion_insn,
                                 all->last_conversion_insn);
              emit_move_insn (rmem, real);
              emit_move_insn (imem, imag);
              all->first_conversion_insn = get_insns ();
              all->last_conversion_insn = get_last_insn ();
              end_sequence ();
            }
          else
            tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          SET_DECL_RTL (parm, tmp);

          real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
          imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }
          tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_decl_incoming_rtl (parm, tmp, false);
          i++;
        }
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  VEC(tree, heap) *fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
        {
          SET_DECL_RTL (parm, const0_rtx);
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          continue;
        }

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        {
          unsigned int align
            = targetm.calls.function_arg_boundary (data.promoted_mode,
                                                   data.passed_type);
          align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
                                     align);
          if (TYPE_ALIGN (data.nominal_type) > align)
            align = MINIMUM_ALIGNMENT (data.nominal_type,
                                       TYPE_MODE (data.nominal_type),
                                       TYPE_ALIGN (data.nominal_type));
          if (crtl->stack_alignment_estimated < align)
            {
              gcc_assert (!crtl->stack_realign_processed);
              crtl->stack_alignment_estimated = align;
            }
        }

      if (cfun->stdarg && !DECL_CHAIN (parm))
        assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
        {
          assign_parm_find_stack_rtl (parm, &data);
          assign_parm_adjust_entry_rtl (&data);
        }

      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
        {
          rtx incoming_rtl
            = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
                           data.entry_parm);
          set_decl_incoming_rtl (parm, incoming_rtl, true);
        }
      else
        set_decl_incoming_rtl (parm, data.entry_parm, false);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (&all.args_so_far,
                                          data.promoted_mode,
                                          data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
        assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
        assign_parm_setup_reg (&all, parm, &data);
      else
        assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  VEC_free (tree, heap, fnargs);

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
        {
          tree type = TREE_TYPE (DECL_RESULT (fndecl));
          enum machine_mode mode = TYPE_MODE (type);

          if (mode != BLKmode
              && mode != VOIDmode
              && !AGGREGATE_TYPE_P (type))
            {
              unsigned int align = GET_MODE_ALIGNMENT (mode);
              if (crtl->stack_alignment_estimated < align)
                {
                  gcc_assert (!crtl->stack_realign_processed);
                  crtl->stack_alignment_estimated = align;
                }
            }
        }
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
        {
          SET_DECL_VALUE_EXPR (result, all.function_result_decl);
          x = addr;
        }
      else
        {
          SET_DECL_VALUE_EXPR (result,
                               build1 (INDIRECT_REF, TREE_TYPE (result),
                                       all.function_result_decl));
          addr = convert_memory_address (Pmode, addr);
          x = gen_rtx_MEM (DECL_MODE (result), addr);
          set_mem_attributes (x, result, 1);
        }

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
  crtl->args.size = MAX (crtl->args.size,
                         REG_PARM_STACK_SPACE (fndecl));
#endif

  crtl->args.size = CEIL_ROUND (crtl->args.size,
                                PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  crtl->args.arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
                                   size_int (-all.stack_args_size.constant)),
                      NULL_RTX, VOIDmode, EXPAND_NORMAL));
#else
  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
                                                         TREE_TYPE (fndecl),
                                                         crtl->args.size);

  /* For a stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl;

          real_decl_rtl
            = targetm.calls.function_value (TREE_TYPE (decl_result),
                                            fndecl, true);
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
          /* The delay slot scheduler assumes that crtl->return_rtx
             holds the hard register containing the return value, not a
             temporary pseudo.  */
          crtl->return_rtx = real_decl_rtl;
        }
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
        *walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
               && !TYPE_SIZES_GIMPLIFIED (t))
        {
          gimplify_type_sizes (t, (gimple_seq *) data);
          *walk_subtrees = 1;
        }
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  VEC(tree, heap) *fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
        continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (&all.args_so_far,
                                          data.promoted_mode,
                                          data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
         SAVE_EXPRs (amongst others) onto a pending sizes list.  This
         turned out to be less than manageable in the gimple world.
         Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
                                    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
        {
          gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
        }

      if (data.passed_pointer)
        {
          tree type = TREE_TYPE (data.passed_type);
          if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
                                       type, data.named_arg))
            {
              tree local, t;

              /* For constant-sized objects, this is trivial; for
                 variable-sized objects, we have to play games.  */
              if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
                  && !(flag_stack_check == GENERIC_STACK_CHECK
                       && compare_tree_int (DECL_SIZE_UNIT (parm),
                                            STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  local = create_tmp_reg (type, get_name (parm));
                  DECL_IGNORED_P (local) = 0;
                  /* If PARM was addressable, move that flag over
                     to the local copy, as its address will be taken,
                     not the PARMs.  Keep the parms address taken
                     as we'll query that flag during gimplification.  */
                  if (TREE_ADDRESSABLE (parm))
                    TREE_ADDRESSABLE (local) = 1;
                }
              else
                {
                  tree ptr_type, addr;

                  ptr_type = build_pointer_type (type);
                  addr = create_tmp_reg (ptr_type, get_name (parm));
                  DECL_IGNORED_P (addr) = 0;
                  local = build_fold_indirect_ref (addr);

                  t = built_in_decls[BUILT_IN_ALLOCA];
                  t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
                  /* The call has been built for a variable-sized object.  */
                  ALLOCA_FOR_VAR_P (t) = 1;
                  t = fold_convert (ptr_type, t);
                  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
                  gimplify_and_add (t, &stmts);
                }

              gimplify_assign (local, parm, &stmts);

              SET_DECL_VALUE_EXPR (parm, local);
              DECL_HAS_VALUE_EXPR_P (parm) = 1;
            }
        }
    }

  VEC_free (tree, heap, fnargs);

  return stmts;
}
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
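/* Worked example of the two roundings (illustrative, not from the
   original source): take a 6-byte BLKmode argument with a function
   argument boundary of 64 bits and PARM_BOUNDARY of 32 bits, on a
   target where arguments grow upward.  The first rounding aligns the
   starting offset up to 8 bytes, e.g. from 4 to 8; the second rounds
   the 6-byte size up to the 4-byte parm boundary, so LOCATE->SIZE
   becomes 8 while the slot still begins at the aligned offset.  */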
void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
                     int partial, tree fndecl ATTRIBUTE_UNUSED,
                     struct args_size *initial_offset_ptr,
                     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              ssize_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
         realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = boundary;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized
                          && crtl->stack_realign_needed);
            }
        }
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
                                          initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
        && (!host_integerp (sizetree, 1)
            || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
     )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
                          &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
                           - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
                                   size_binop (MINUS_EXPR,
                                               ssize_int (0),
                                               initial_offset_ptr->var),
                                   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
                          &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
          || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
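/* Worked example (illustrative, not from the original source): with
   BOUNDARY == 64, STACK_POINTER_OFFSET == 0 and a constant offset of
   12 bytes, CEIL_ROUND (12, 8) yields 16, so 4 bytes of alignment
   padding are recorded when BOUNDARY exceeds PARM_BOUNDARY.  The
   rounding is applied to offset + STACK_POINTER_OFFSET because only
   that sum is guaranteed to be aligned to STACK_BOUNDARY.  */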
static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
                      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          tree sp_offset_tree = ssize_int (sp_offset);
          tree offset = size_binop (PLUS_EXPR,
                                    ARGS_SIZE_TREE (*offset_ptr),
                                    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
          tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
          tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

          offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
          /* ARGS_SIZE_TREE includes constant term.  */
          offset_ptr->constant = 0;
          if (boundary > PARM_BOUNDARY)
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
                                             save_var);
        }
      else
        {
          offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
            FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
            CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
          if (boundary > PARM_BOUNDARY)
            alignment_pad->constant = offset_ptr->constant - save_constant;
        }
    }
}
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode,
           tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          /* Add it in.  */
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
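/* Worked example for pad_below (illustrative, not from the original
   source): for an HImode argument (16 bits) with PARM_BOUNDARY == 32,
   the adjustment is ((16 + 31) / 32) * 32 / 8 - 2 == 4 - 2 == 2 bytes,
   i.e. the value is placed in the upper half of its 4-byte slot when
   the padding direction is downward.  */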
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
           || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
          && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be clobbered by setjmp
   or vfork.  This is done after flow analysis and before register
   allocation, since register allocation will assign the pseudo-regs
   to hard regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    if (TREE_CODE (decl) == VAR_DECL
	&& DECL_RTL_SET_P (decl)
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
	       " %<longjmp%> or %<vfork%>", decl);

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
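/* As an illustration (not part of the compiler), -Wclobbered can fire
   for code such as

     jmp_buf buf;
     int f (void)
     {
       int i = 1;
       if (setjmp (buf) == 0)
	 i = 2;
       return i;
     }

   because I is set more than once and is live across the setjmp, so a
   longjmp may restore a register copy holding the earlier value.  */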
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
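/* E.g. (illustrative only): given a fragment chain B1 -> B2 -> B3,
   block_fragments_nreverse (B1) returns B3 with the links rewritten to
   B3 -> B2 -> B1.  blocks_nreverse_all and blocks_nreverse below do the
   same for BLOCK_CHAIN links.  */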
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	BLOCK_FRAGMENT_CHAIN (block)
	  = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *block_stack;

  if (block == NULL_TREE)
    return;

  block_stack = VEC_alloc (tree, heap, 10);

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));

  VEC_free (tree, heap, block_stack);
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx insns, tree current_block,
		  VEC(tree,heap) **p_block_stack)
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);

		  BLOCK_SUPERCONTEXT (block) = current_block;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      VEC_safe_push (tree, heap, *p_block_stack, block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	    }
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
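/* Usage sketch (illustrative only; number_blocks below is the real
   caller):

     int n_blocks;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     ... use vec[0] .. vec[n_blocks - 1] ...
     free (vec);
*/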
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
    }
}

/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static VEC(function_p,heap) *cfun_stack;
/* Push the current cfun onto the stack, and set cfun to new_cfun.  */

void
push_cfun (struct function *new_cfun)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  set_cfun (new_cfun);
}

/* Pop cfun from the stack.  */

void
pop_cfun (void)
{
  struct function *new_cfun = VEC_pop (function_p, cfun_stack);
  set_cfun (new_cfun);
}
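/* Typical usage sketch (illustrative only): a pass that needs to examine
   or emit code for another function temporarily switches contexts with

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... work on OTHER_FNDECL ...
     pop_cfun ();

   so that cfun, and the target's notion of the current function, are
   restored afterwards.  */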
/* Return the current value of funcdef_no and increase it.  */

int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree result;
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared_function ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();

      result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage)
    {
      cfun->su = ggc_alloc_cleared_stack_usage ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);
  prepare_function_start ();

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Make sure all values used by the optimization passes have sane defaults.  */
unsigned int
init_function_for_compilation (void)
{
  reg_renumber = 0;
  return 0;
}

struct rtl_opt_pass pass_init_function =
{
 {
  RTL_PASS,
  "*init_function",			/* name */
  NULL,					/* gate */
  init_function_for_compilation,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
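/* Conceptually (an illustrative sketch, not the emitted RTL itself) the
   prologue code above amounts to

     guard_local = guard;

   where guard_local is this frame's canary slot
   (crtl->stack_protect_guard) and guard stands for whatever
   targetm.stack_protect_guard returned (commonly the global
   __stack_chk_guard).  */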
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test	0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
	{
	  emit_insn (tmp);
	  break;
	}
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing
     anything except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_expr_stmt (targetm.stack_protect_fail ());
  emit_label (label);
}
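/* Conceptually (an illustrative sketch) the check emitted above is

     if (guard_local == guard)
       goto label;
     fail ();
   label:

   where fail () is whatever call targetm.stack_protect_fail provides,
   with the failure path treated as cold.  */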
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
	    {
	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
	      set_mem_attributes (x, DECL_RESULT (subr), 1);
	    }
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain, insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_unique_reg_note (insn, REG_EQUIV, chain);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      /* ??? We need to do this save early.  Unfortunately here is
	 before the frame variable gets declared.  Help out...  */
      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      if (!DECL_RTL_SET_P (var))
	expand_decl (var);

      t_save = build4 (ARRAY_REF, ptr_type_node,
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      r_save = convert_memory_address (Pmode, r_save);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the stack checking
     probe should go.  */
  if (flag_stack_check)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = DECL_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
	&& !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}

static GTY(()) rtx initial_trampoline;
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locators (seq, prologue_locator);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  force_next_line_note ();
  set_curr_insn_source_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
	emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0, seq;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
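/* A worked illustration of the return_in_msb handling in
   expand_function_end above (not part of the compiler): when a 16-bit
   value is returned in a 32-bit register on a target whose ABI wants
   results at the most-significant end, shift_return_value shifts the
   copied value left by 32 - 16 == 16 bits so the caller finds it in the
   high half of the register.  */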
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret),
		      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx insns, rtx end, htab_t *hashp)
{
  rtx tmp;
  htab_t hash = *hashp;

  if (hash == NULL)
    *hashp = hash
      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      void **slot = htab_find_slot (hash, tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated, or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  htab_t hash;
  void **slot;

  hash = epilogue_insn_hash;
  if (!hash || !htab_find (hash, insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !htab_find (hash, insn))
	return;
    }

  slot = htab_find_slot (hash, copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Set the locator of the insn chain starting at INSN to LOC.  */
static void
set_insn_locators (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
	INSN_LOCATOR (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, htab_t hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int i;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
	  return true;
      return false;
    }

  return htab_find (hash, insn) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
#ifdef HAVE_return
/* Insert gen_return at the end of block BB.  This also means updating
   block_for_insn appropriately.  */

static void
emit_return_into_block (basic_block bb)
{
  emit_jump_insn_after (gen_return (), BB_END (bb));
}
#endif /* HAVE_return */
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

static void
thread_prologue_and_epilogue_insns (void)
{
  bool inserted;
  rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
  edge entry_edge ATTRIBUTE_UNUSED;
  edge e;
  edge_iterator ei;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  inserted = false;
  epilogue_end = NULL_RTX;

  /* Can't deal with multiple successors of the entry block at the
     moment.  Function should always have at least one entry
     point.  */
  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);

  if (flag_split_stack
      && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
	  == NULL))
    {
#ifndef HAVE_split_stack_prologue
      gcc_unreachable ();
#else
      gcc_assert (HAVE_split_stack_prologue);

      start_sequence ();
      emit_insn (gen_split_stack_prologue ());
      seq = get_insns ();
      end_sequence ();

      record_insns (seq, NULL, &prologue_insn_hash);
      set_insn_locators (seq, prologue_locator);

      /* This relies on the fact that committing the edge insertion
	 will look for basic blocks within the inserted instructions,
	 which in turn relies on the fact that we are not in CFG
	 layout mode here.  */
      insert_insn_on_edge (seq, entry_edge);
      inserted = true;
#endif
    }

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
	 if the profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
	emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, NULL, &prologue_insn_hash);
      emit_note (NOTE_INSN_PROLOGUE_END);

      /* Ensure that instructions are not moved into the prologue when
	 profiling is on.  The call to the profiling routine can be
	 emitted within the live range of a call-clobbered register.  */
      if (!targetm.profile_before_prologue () && crtl->profile)
	emit_insn (gen_blockage ());

      seq = get_insns ();
      end_sequence ();
      set_insn_locators (seq, prologue_locator);

      insert_insn_on_edge (seq, entry_edge);
      inserted = true;
    }
#endif

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

#ifdef HAVE_return
  if (optimize && HAVE_return)
    {
      /* If we're allowed to generate a simple return instruction,
	 then by definition we don't need a full epilogue.  Examine
	 the block that falls through to EXIT.  If it does not
	 contain any code, examine its predecessors and try to
	 emit (conditional) return instructions.  */

      basic_block last;
      rtx label;

      e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
      if (e == NULL)
	goto epilogue_done;
      last = e->src;

      /* Verify that there are no active instructions in the last block.  */
      label = BB_END (last);
      while (label && !LABEL_P (label))
	{
	  if (active_insn_p (label))
	    break;
	  label = PREV_INSN (label);
	}

      if (BB_HEAD (last) == label && LABEL_P (label))
	{
	  edge_iterator ei2;

	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
	    {
	      basic_block bb = e->src;
	      rtx jump;

	      if (bb == ENTRY_BLOCK_PTR)
		{
		  ei_next (&ei2);
		  continue;
		}

	      jump = BB_END (bb);
	      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
		{
		  ei_next (&ei2);
		  continue;
		}

	      /* If we have an unconditional jump, we can replace that
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
		  emit_return_into_block (bb);
		  delete_insn (jump);
		}

	      /* If we have a conditional jump, we can try to replace
		 that with a conditional return instruction.  */
	      else if (condjump_p (jump))
		{
		  if (! redirect_jump (jump, 0, 0))
		    {
		      ei_next (&ei2);
		      continue;
		    }

		  /* If this block has only one successor, it both jumps
		     and falls through to the fallthru block, so we can't
		     delete the edge.  */
		  if (single_succ_p (bb))
		    {
		      ei_next (&ei2);
		      continue;
		    }
		}
	      else
		{
		  ei_next (&ei2);
		  continue;
		}

	      /* Fix up the CFG for the successful change we just made.  */
	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
	    }

	  /* Emit a return insn for the exit fallthru block.  Whether
	     this is still reachable will be determined later.  */

	  emit_barrier_after (BB_END (last));
	  emit_return_into_block (last);
	  epilogue_end = BB_END (last);
	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
	}
    }
#endif

  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
	continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
	continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
	continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif

  /* Find the edge that falls through to EXIT.  Other edges may exist
     due to RETURN instructions, but those don't need epilogues.
     There really shouldn't be a mixture -- either all should have
     been converted or none, however...  */

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      if (seq)
	emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = true;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
	if (cur_bb->index >= NUM_FIXED_BLOCKS
	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	  cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }

epilogue_done:
  default_rtl_profile ();

  if (inserted)
    {
      commit_edge_insertions ();

      /* The epilogue insns we inserted may cause the exit edge to no longer
	 be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  if (((e->flags & EDGE_FALLTHRU) != 0)
	      && returnjump_p (BB_END (e->src)))
	    e->flags &= ~EDGE_FALLTHRU;
	}
    }

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn))
	{
	  ei_next (&ei);
	  continue;
	}

      start_sequence ();
      emit_note (NOTE_INSN_EPILOGUE_BEG);
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif

#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  rtx insn, first = NULL, note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  if (cfun == NULL)
    return "<none>";
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
						 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
	*slot = type;
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	/* So this might be a type referenced by a global variable.
	   Record that type so that we can later decide to emit its debug
	   information.  */
	VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
    }
}
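/* For example (illustrative only): for a declaration such as

     struct S **a[10];

   the loop above strips the anonymous array and pointer layers, so it
   is the (main variant of) struct S that gets recorded for debug
   output -- unless an intermediate pointer or array type carries its
   own TYPE_NAME (e.g. comes from a typedef), in which case stripping
   stops there.  */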
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}
/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash =
	  htab_create_ggc (37, types_used_by_vars_do_hash,
			   types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
				       hash_types_used_by_vars_entry (&e),
				       INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc_types_used_by_vars_entry ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  "*leaf_regs",				/* name */
  NULL,					/* gate */
  rest_of_handle_check_leaf_regs,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage)
    output_stack_usage ();

  return 0;
}
struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",			/* name */
  NULL,					/* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,	/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_verify_flow,			/* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect			/* todo_flags_finish */
 }
};
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     v = inout;
     asm ("": "+mr" (inout));
     v2 = v;

   which is transformed very early to use explicit output and match operands:

     v = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     v2 = v;

   Or, after SSA and copyprop,

     v_1 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_1));
     v_2 = v_1;

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}
struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",				/* name */
  NULL,					/* gate */
  rest_of_match_asm_constraints,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};


#include "gt-function.h"