/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "rtl-error.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-pass.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "tree-chkp.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment request.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
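
/* For illustration, with ALIGN == 8: FLOOR_ROUND (11, 8) == 8 and
   FLOOR_ROUND (-13, 8) == -16, while CEIL_ROUND (11, 8) == 16 and
   CEIL_ROUND (-13, 8) == -8.  The masking trick assumes two's
   complement arithmetic, which is what makes it behave sensibly for
   the negative offsets that arise when FRAME_GROWS_DOWNWARD.  */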
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
                           &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with last parameter as false.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          if (INTVAL (XEXP (x, 1)) >= p->base_offset
              && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
            return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
                            tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              vec_safe_push (stack_slot_list, p->slot);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location; if so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and there is a register
     in common between them, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
1216 init_temp_slots (void)
1218 /* We have not allocated any temporaries yet. */
1219 avail_temp_slots
= 0;
1220 vec_alloc (used_temp_slots
, 0);
1221 temp_slot_level
= 0;
1222 n_temp_slots_in_use
= 0;
1224 /* Set up the table to map addresses to temp slots. */
1225 if (! temp_slot_address_table
)
1226 temp_slot_address_table
= hash_table
<temp_address_hasher
>::create_ggc (32);
1228 temp_slot_address_table
->empty ();
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};

/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}
static unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS \
    ? (crtl->outgoing_args_size \
       + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE \
                                          : TREE_TYPE (FNDECL))) ? 0 \
          : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
    : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
   + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
        {
          rtx new_rtx;
          HOST_WIDE_INT offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
                {
                  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
                  changed = true;
                }
              iter.skip_subrtxes ();
              break;

            case PLUS:
              new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
              if (new_rtx)
                {
                  XEXP (x, 0) = new_rtx;
                  *loc = plus_constant (GET_MODE (x), x, offset, true);
                  changed = true;
                  iter.skip_subrtxes ();
                  break;
                }

              /* FIXME -- from old code */
              /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
                 we can commute the PLUS and SUBREG because pointers into the
                 frame are well-behaved.  */
              break;

            default:
              break;
            }
        }
    }
  return changed;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          instantiate_virtual_regs_in_rtx (&SET_SRC (set));
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   gen_int_mode (-offset, GET_MODE (new_rtx)));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
                                   gen_int_mode (offset,
                                                 GET_MODE (SET_DEST (set))),
                                   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);

            if (!instantiate_virtual_regs_in_rtx (&addr))
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr, true);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr, true);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is a valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       gen_int_mode (offset, GET_MODE (x)),
                                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop
                (GET_MODE (new_rtx), PLUS, new_rtx,
                 gen_int_mode (offset, GET_MODE (new_rtx)),
                 NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          /* For asm goto, instead of fixing up all the edges
             just clear the template and clear input operands
             (asm goto doesn't have any output operands).  */
          if (JUMP_P (insn))
            {
              rtx asm_op = extract_asm_operands (PATTERN (insn));
              ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
              ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
              ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
            }
          else
            delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process the saved static chain if it exists.  */
  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));

  /* Now process all variables defined in the function or its subblocks.  */
  if (DECL_INITIAL (fndecl))
    instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = targetm.starting_frame_offset ();
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ASM_INPUT
            || DEBUG_MARKER_INSN_P (insn))
          continue;
        else if (DEBUG_BIND_INSN_P (insn))
          instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
        else
          instantiate_virtual_regs_in_insn (insn);

        if (insn->deleted ())
          continue;

        instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          if (fndecl)
            fntype = TREE_TYPE (fndecl);
          else if (CALL_EXPR_FN (fntype))
            fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
          else
            /* For internal functions, assume nothing needs to be
               returned in memory.  */
            return 0;
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (TYPE_EMPTY_P (type))
    return 0;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
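
/* For illustration (the outcome is target-dependent): on a port whose
   ABI returns small records in registers, aggregate_value_p typically
   yields 0 for 'struct { int x; }' but 1 for a record too large for
   the return registers, which forces the caller to pass a hidden
   address for the return value.  */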
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* We often try to use the SSA_NAME, instead of its underlying
	 decl, to get type information and guide decisions, to avoid
	 differences of behavior between anonymous and named
	 variables, but in this one case we have to go for the actual
	 variable if there is one.  The main reason is that, at least
	 at -O0, we want to place user variables on the stack, but we
	 don't mind using pseudos for anonymous or ignored temps.
	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
	 should go in pseudos, whereas their corresponding variables
	 might have to go on the stack.  So, disregarding the decl
	 here would negatively impact debug info at -O0, enable
	 coalescing between SSA_NAMEs that ought to get different
	 stack/pseudo assignments, and get the incoming argument
	 processing thoroughly confused by PARM_DECLs expected to live
	 in stack slots but assigned to pseudos.  */
      if (!SSA_NAME_VAR (decl))
	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));

      decl = SSA_NAME_VAR (decl);
    }

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* RESULT_DECLs are a bit special in that they're assigned without
     regard to use_register_for_decl, but we generally only store in
     them.  If we coalesce their SSA NAMEs, we'd better return a
     result that matches the assignment in expand_function_start.  */
  if (TREE_CODE (decl) == RESULT_DECL)
    {
      /* If it's not an aggregate, we're going to use a REG or a
	 PARALLEL containing a REG.  */
      if (!aggregate_value_p (decl, current_function_decl))
	return true;

      /* If expand_function_start determines the return value, we'll
	 use MEM if it's not by reference.  */
      if (cfun->returns_pcc_struct
	  || (targetm.calls.struct_value_rtx
	      (TREE_TYPE (current_function_decl), 1)))
	return DECL_BY_REFERENCE (decl);

      /* Otherwise, we're taking an extra all.function_result_decl
	 argument.  It's set up in assign_parms_augmented_arg_list,
	 under the (negated) conditions above, and then it's used to
	 set up the RESULT_DECL rtl in assign_parms, after looping
	 over all parameters.  Now, if the RESULT_DECL is not by
	 reference, we'll use a MEM either way.  */
      if (!DECL_BY_REFERENCE (decl))
	return false;

      /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
	 the function_result_decl's assignment.  Since it's a pointer,
	 we can short-circuit a number of the tests below, and we must
	 duplicate them because we don't have the
	 function_result_decl to test.  */
      if (!targetm.calls.allocate_stack_slots_for_args ())
	return true;
      /* We don't set DECL_IGNORED_P for the function_result_decl.  */
      if (optimize)
	return true;
      /* We don't set DECL_REGISTER for the function_result_decl.  */
      return false;
    }

  /* Decl is implicitly addressable by bound stores and loads
     if it is an aggregate holding bounds.  */
  if (chkp_function_instrumented_p (current_function_decl)
      && TREE_TYPE (decl)
      && !BOUNDED_P (decl)
      && chkp_type_has_pointer (TREE_TYPE (decl)))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  /* When not optimizing, disregard register keyword for types that
     could have methods, otherwise the methods won't be callable from
     the debugger.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
    return false;

  return true;
}
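
/* Illustrative example (not part of the original sources): given

     void f (void) { volatile int v = 0; int t = 0; ... }

   use_register_for_decl returns false for `v' (TREE_SIDE_EFFECTS), and at
   -O0 it typically returns false for the user variable `t' as well, since
   `t' is neither DECL_IGNORED_P nor marked `register' and we are not
   optimizing; an artificial, ignored temporary of register-like mode would
   get true.  */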
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};

struct bounds_parm_data
{
  assign_parm_data_one parm_data;
  tree bounds_parm;
  tree ptr_parm;
  rtx ptr_entry;
  int bound_no;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  SET_DECL_MODE (p, VOIDmode);
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;
      /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
	 changes, the end of the RESULT_DECL handling block in
	 use_register_for_decl must be adjusted to match.  */

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;

      /* If the function is instrumented, the bounds of the passed
	 structure address are the second argument.  */
      if (chkp_function_instrumented_p (fndecl))
	{
	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			     PARM_DECL, get_identifier (".result_bnd"),
			     pointer_bounds_type_node);
	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_NAMELESS (decl) = 1;
	  TREE_CONSTANT (decl) = 1;

	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
	  DECL_CHAIN (all->orig_fnargs) = decl;
	  fnargs.safe_insert (1, decl);
	}
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
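
/* Illustrative note (not part of the original sources): for a stdarg
   function on a target that passes some arguments in registers, the hook
   typically asks for the unused anonymous argument registers to be dumped
   below the named arguments; e.g. if four word-sized argument registers
   remain unused it may report 4 * UNITS_PER_WORD pretend bytes, which the
   prologue then allocates and fills.  */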
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  targetm.calls.warn_parameter_passing_abi (all->args_so_far,
					    data->passed_type);

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
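
/* Illustrative example (not part of the original sources): with an 8-byte
   STACK_BYTES, a parameter whose first 4 bytes arrive in a register
   (partial == 4) gives

     pretend_bytes = 4;
     all->pretend_args_size = CEIL_ROUND (4, 8);   /* == 8 */

   so the prologue reserves a whole stack word for the register half of the
   argument, preserving the STACK_BOUNDARY invariant described above.  */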
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Bounds are never passed on the stack to keep compatibility
     with not instrumented code.  */
  if (POINTER_BOUNDS_TYPE_P (data->passed_type))
    return false;
  /* Trivially true if we've no incoming register.  */
  else if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
							 data->promoted_mode);
	      if (maybe_ne (offset, 0))
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = least_bit_hwi (align);
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm),
			       validize_mem (copy_rtx (stack_parm)),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
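
/* Illustrative example (not part of the original sources): a TImode
   argument arriving as something like

     (parallel:TI [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   is reconstituted here into a single TImode pseudo via emit_group_store,
   so later code can treat it as one register value.  */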
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  rtx target_reg = NULL_RTX;
  bool in_conversion_seq = false;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we want the parameter in a pseudo, don't use a stack slot.  */
  if (is_gimple_reg (parm) && use_register_for_decl (parm))
    {
      tree def = ssa_default_def (cfun, parm);
      gcc_assert (def);
      machine_mode mode = promote_ssa_mode (def, NULL);
      rtx reg = gen_reg_rtx (mode);
      if (GET_CODE (reg) != CONCAT)
	stack_parm = reg;
      else
	{
	  target_reg = reg;
	  /* Avoid allocating a stack slot, if there isn't one
	     preallocated by the ABI.  It might seem like we should
	     always prefer a pseudo, but converting between
	     floating-point and integer modes goes through the stack
	     on various machines, so it's better to use the reserved
	     stack slot than to risk wasting it and allocating more
	     for the conversion.  */
	  if (stack_parm == NULL_RTX)
	    {
	      int save = generating_concat_p;
	      generating_concat_p = 0;
	      stack_parm = gen_reg_rtx (mode);
	      generating_concat_p = save;
	    }
	}
      data->stack_parm = NULL;
    }

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
	emit_group_store (mem, entry_parm, data->passed_type, size);
      else if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	  in_conversion_seq = true;
	}
      else if (size == 0)
	;
      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  unsigned int bits = size * BITS_PER_UNIT;
	  machine_mode mode = int_mode_for_size (bits, 0).else_blk ();

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
						 BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

#ifdef BLOCK_REG_PADDING
	  /* Storing the register in memory as a full word, as
	     move_block_from_reg below would do, and then using the
	     MEM in a smaller mode, has the effect of shifting right
	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
	     shifting must be explicit.  */
	  else if (!MEM_P (mem))
	    {
	      rtx x;

	      /* If the assert below fails, we should have taken the
		 mode != BLKmode path above, unless we have downward
		 padding of smaller-than-word arguments on a machine
		 with little-endian bytes, which would likely require
		 additional changes to work correctly.  */
	      gcc_checking_assert (BYTES_BIG_ENDIAN
				   && (BLOCK_REG_PADDING (mode,
							  data->passed_type, 1)
				       == PAD_UPWARD));

	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
				NULL_RTX, 1);
	      x = force_reg (word_mode, x);
	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);

	      emit_move_insn (mem, x);
	    }
#endif

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == PAD_DOWNWARD)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else if (!MEM_P (mem))
	{
	  gcc_checking_assert (size > UNITS_PER_WORD);
#ifdef BLOCK_REG_PADDING
	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
						  data->passed_type, 0)
			       == PAD_UPWARD);
#endif
	  emit_move_insn (mem, entry_parm);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
      in_conversion_seq = true;
    }

  if (target_reg)
    {
      if (!in_conversion_seq)
	emit_move_insn (target_reg, stack_parm);
      else
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (target_reg, stack_parm);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}
      stack_parm = target_reg;
    }

  data->stack_parm = stack_parm;
  set_parm_rtl (parm, stack_parm);
}
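
/* Illustrative example (not part of the original sources): on a 32-bit
   big-endian target where a 3-byte BLKmode argument arrives in a register
   padded downward, the explicit left shift above uses

     by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;   /* (4 - 3) * 8 == 8 */

   to move the three significant bytes to the most significant end of the
   word before the full-word store.  */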
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;
  rtx rtl;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);
  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set rtl appropriately.  */
  if (data->passed_pointer)
    {
      rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (rtl, parm, 1);
    }
  else
    rtl = parmreg;

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (copy_rtx (data->entry_parm));

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx_insn *insn, *insns;
	  rtx t = op1;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
					   data->passed_mode, unsignedp);
	  emit_insn (pat);
	  insns = get_insns ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (PATTERN (insn), record_hard_reg_sets,
			     &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  end_sequence ();

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (partial_subreg_p (tempreg)
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      SET_DECL_RTL (parm, rtl);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      SET_DECL_RTL (parm, NULL_RTX);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
	 the debug info in case it is not legitimate.  */
      if (GET_MODE (parmreg) != GET_MODE (rtl))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, rtl);
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
			  tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);

      rtl = parmreg;

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  set_parm_rtl (parm, rtl);

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx_insn *linsn = get_last_insn ();
      rtx_insn *sinsn;
      rtx set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  poly_int64 offset
	    = subreg_lowpart_offset (data->nominal_mode,
				     GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	{
	  if (!REG_P (src))
	    src = force_reg (GET_MODE (src), src);
	  emit_move_insn (dest, src);
	}
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  set_parm_rtl (parm, data->stack_parm);
}
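
/* Illustrative note (not part of the original sources): this path is taken,
   for instance, by an addressable scalar parameter at -O0:
   use_register_for_decl refuses a pseudo for it, so the incoming register
   value is copied straight into its ABI stack slot (or a freshly allocated
   local slot), and the parm's DECL_RTL becomes that MEM.  */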
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_parm_rtl (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}
/* Load bounds of PARM from bounds table.  */
static void
assign_parm_load_bounds (struct assign_parm_data_one *data,
			 tree parm,
			 rtx entry,
			 unsigned bound_no)
{
  bitmap_iterator bi;
  unsigned i, offs = 0;
  int bnd_no = -1;
  rtx slot = NULL, ptr = NULL;

  if (parm)
    {
      bitmap slots;
      bitmap_obstack_initialize (NULL);
      slots = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (TREE_TYPE (parm), slots);
      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
	{
	  if (bound_no)
	    bound_no--;
	  else
	    {
	      bnd_no = i;
	      break;
	    }
	}
      BITMAP_FREE (slots);
      bitmap_obstack_release (NULL);
    }

  /* We may have bounds not associated with any pointer.  */
  if (bnd_no != -1)
    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;

  /* Find associated pointer.  */
  if (bnd_no == -1)
    {
      /* If bounds are not associated with any pointer,
	 then they are passed in a register or special slot.  */
      gcc_assert (data->entry_parm);
      ptr = const0_rtx;
    }
  else if (MEM_P (entry))
    slot = adjust_address (entry, Pmode, offs);
  else if (REG_P (entry))
    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
  else if (GET_CODE (entry) == PARALLEL)
    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
  else
    gcc_unreachable ();
  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
							data->entry_parm);
}
/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */

static void
assign_bounds (vec<bounds_parm_data> &bndargs,
	       struct assign_parm_data_all &all,
	       bool assign_regs, bool assign_special,
	       bool assign_bt)
{
  unsigned i, pass;
  bounds_parm_data *pbdata;

  if (!bndargs.exists ())
    return;

  /* We make a few passes to store input bounds.  Firstly handle bounds
     passed in registers.  After that we load bounds passed in special
     slots.  Finally we load bounds from Bounds Table.  */
  for (pass = 0; pass < 3; pass++)
    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
      {
	/* Pass 0 => regs only.  */
	if (pass == 0
	    && (!assign_regs
		|| (!pbdata->parm_data.entry_parm
		    || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
	  continue;
	/* Pass 1 => slots only.  */
	else if (pass == 1
		 && (!assign_special
		     || (!pbdata->parm_data.entry_parm
			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
	  continue;
	/* Pass 2 => BT only.  */
	else if (pass == 2
		 && (!assign_bt
		     || pbdata->parm_data.entry_parm))
	  continue;

	if (!pbdata->parm_data.entry_parm
	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
				   pbdata->ptr_entry, pbdata->bound_no);

	set_decl_incoming_rtl (pbdata->bounds_parm,
			       pbdata->parm_data.entry_parm, false);

	if (assign_parm_setup_block_p (&pbdata->parm_data))
	  assign_parm_setup_block (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
	else if (pbdata->parm_data.passed_pointer
		 || use_register_for_decl (pbdata->bounds_parm))
	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
				 &pbdata->parm_data);
	else
	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
      }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i, bound_no = 0;
  tree last_arg = NULL;
  rtx last_arg_entry = NULL;
  vec<bounds_parm_data> bndargs = vNULL;
  bounds_parm_data bdata;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}
      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Remember where last non bounds arg was passed in case
	     we have to load associated bounds for it from Bounds
	     Table.  */
	  last_arg = parm;
	  last_arg_entry = data.entry_parm;
	  bound_no = 0;
	}
      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      assign_parm_adjust_stack_rtl (&data);

      /* Bounds should be loaded in the particular order to
	 have registers allocated correctly.  Collect info about
	 input bounds and load them later.  */
      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Expect bounds in instrumented functions only.  */
	  gcc_assert (chkp_function_instrumented_p (fndecl));

	  bdata.parm_data = data;
	  bdata.bounds_parm = parm;
	  bdata.ptr_parm = last_arg;
	  bdata.ptr_entry = last_arg_entry;
	  bdata.bound_no = bound_no;
	  bndargs.safe_push (bdata);
	}
      else
	{
	  if (assign_parm_setup_block_p (&data))
	    assign_parm_setup_block (&all, parm, &data);
	  else if (data.passed_pointer || use_register_for_decl (parm))
	    assign_parm_setup_reg (&all, parm, &data);
	  else
	    assign_parm_setup_stack (&all, parm, &data);
	}

      if (cfun->stdarg && !DECL_CHAIN (parm))
	{
	  int pretend_bytes = 0;

	  assign_parms_setup_varargs (&all, &data, false);

	  if (chkp_function_instrumented_p (fndecl))
	    {
	      /* We expect this is the last parm.  Otherwise it is wrong
		 to assign bounds right now.  */
	      gcc_assert (i == (fnargs.length () - 1));
	      assign_bounds (bndargs, all, true, false, false);
	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
							  data.promoted_mode,
							  data.passed_type,
							  &pretend_bytes,
							  false);
	      assign_bounds (bndargs, all, false, true, true);
	      bndargs.release ();
	    }
	}

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	bound_no++;
    }

  assign_bounds (bndargs, all, true, true, true);
  bndargs.release ();

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      set_parm_rtl (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
	= (all.stack_args_size.var == 0
	   ? GEN_INT (-all.stack_args_size.constant)
	   : expand_expr (size_diffop (all.stack_args_size.var,
				       size_int (-all.stack_args_size.constant)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  if (chkp_function_instrumented_p (fndecl))
	    crtl->return_bnd
	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
							  fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARMs.  Keep the parms address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
					      DECL_ALIGN (parm),
					      max_int_size_in_bytes (type));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
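
/* For a callee-copied, variable-sized parameter P the statements built
   above amount to something like the following pseudo-gimple (names
   invented purely for illustration):

     p.addr = alloca-like call (DECL_SIZE_UNIT (P), align);
     *p.addr = P;

   after which DECL_VALUE_EXPR (P) is *p.addr, so all later uses of P
   refer to the local copy rather than the caller's object.  */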
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
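
/* A worked example of the two roundings (hypothetical target with
   PARM_BOUNDARY of 32 bits): a 6-byte BLKmode argument whose
   TARGET_FUNCTION_ARG_BOUNDARY is 64 bits, arriving at a running
   offset of 20 bytes, first has its starting offset aligned up to 24
   (the next multiple of 8 bytes), and then has its size rounded up
   from 6 to 8 bytes, so the next argument starts at offset 32.  */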
void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  pad_direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree = (type
	      ? arg_size_in_bytes (type)
	      : size_int (GET_MODE_SIZE (passed_mode)));
  where_pad = targetm.calls.function_arg_padding (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

  if (ARGS_GROW_DOWNWARD)
    {
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					      initial_offset_ptr->var);

      {
	tree s2 = sizetree;
	if (where_pad != PAD_NONE
	    && (!tree_fits_uhwi_p (sizetree)
		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
	SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (&locate->slot_offset, boundary,
			      &locate->alignment_pad);

      locate->size.constant = (-initial_offset_ptr->constant
			       - locate->slot_offset.constant);
      if (initial_offset_ptr->var)
	locate->size.var = size_binop (MINUS_EXPR,
				       size_binop (MINUS_EXPR,
						   ssize_int (0),
						   initial_offset_ptr->var),
				       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
	 below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);
    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (initial_offset_ptr, boundary,
			      &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
	 so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);

      if (where_pad != PAD_NONE
	  && (!tree_fits_uhwi_p (sizetree)
	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

      ADD_PARM_SIZE (locate->size, sizetree);

      locate->size.constant -= part_size_in_regs;
    }

  locate->offset.constant
    += targetm.calls.function_arg_offset (passed_mode, type);
}
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
	  tree rounded;

	  if (ARGS_GROW_DOWNWARD)
	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
	  else
	    rounded = round_up (offset, boundary / BITS_PER_UNIT);

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset
	    + (ARGS_GROW_DOWNWARD
	       ? FLOOR_ROUND (offset_ptr->constant + sp_offset,
			      boundary_in_bytes)
	       : CEIL_ROUND (offset_ptr->constant + sp_offset,
			     boundary_in_bytes));

	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
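
/* For instance (illustrative numbers only): with BOUNDARY of 64 bits
   (boundary_in_bytes == 8), STACK_POINTER_OFFSET of 4 and a constant
   offset of 10, args growing upward: CEIL_ROUND (10 + 4, 8) == 16, so
   offset_ptr->constant becomes -4 + 16 == 12; the slot is then 64-bit
   aligned relative to the actual stack pointer rather than relative to
   the raw offset alone.  */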
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode,
	   tree sizetree)
{
  unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
  if (passed_mode != BLKmode)
    offset_ptr->constant += -GET_MODE_SIZE (passed_mode) & (align - 1);
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, align);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
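
/* Example (illustrative): with PARM_BOUNDARY of 32 bits, ALIGN is 4.
   For a 2-byte HImode value, -2 & 3 == 2, so the offset is advanced by
   the two bytes of padding sitting below the value in its 4-byte slot.
   For a 6-byte BLKmode value the ADD/SUB_PARM_SIZE pair advances the
   offset by round_up (6, 4) - 6 == 2 as well.  */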
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow_analysis before register
   allocation since that will clobber the pseudo-regs to hard
   regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (VAR_P (decl)
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx_insn *insns, tree current_block,
		  vec<tree> *p_block_stack)
{
  rtx_insn *insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      if (prev_end)
		BLOCK_SAME_RANGE (prev_end) = 0;
	      prev_end = NULL_TREE;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_SAME_RANGE (new_block) = 0;
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      if (prev_beg == current_block && prev_beg)
		BLOCK_SAME_RANGE (block) = 1;

	      prev_beg = origin;

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  tree super;
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
							    (origin))
				   == current_block);
		  if (p_block_stack->is_empty ())
		    super = current_block;
		  else
		    {
		      super = p_block_stack->last ();
		      gcc_assert (super == current_block
				  || BLOCK_FRAGMENT_ORIGIN (super)
				     == current_block);
		    }
		  BLOCK_SUPERCONTEXT (block) = super;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      p_block_stack->safe_push (block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = p_block_stack->pop ();
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
	      prev_beg = NULL_TREE;
	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
			 ? NOTE_BLOCK (insn) : NULL_TREE;
	    }
	}
      else
	{
	  prev_beg = NULL_TREE;
	  if (prev_end)
	    BLOCK_SAME_RANGE (prev_end) = 0;
	  prev_end = NULL_TREE;
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
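
/* E.g. a chain B1 -> B2 -> B3 linked through BLOCK_CHAIN becomes
   B3 -> B2 -> B1, and B3 is returned as the new head.  */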
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      if (opts != optimization_default_node)
	{
	  init_tree_optimization_optabs (opts);
	  if (TREE_OPTIMIZATION_OPTABS (opts))
	    this_fn_optabs = (struct target_optabs *)
	      TREE_OPTIMIZATION_OPTABS (opts);
	}
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun, bool force)
{
  if (cfun != new_cfun || force)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl
						 : NULL_TREE);
      redirect_edge_var_map_empty ();
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function *> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}
/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
		       || !cfun
		       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
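
/* Typical usage is a matched pair around work on another function,
   e.g. (sketch):

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... inspect or emit code into the other function ...
     pop_cfun ();  */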
/* Return value of funcdef and increase it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return value of funcdef.  */
int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_cleared_alloc<function> ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);

      if (!abstract_p)
	{
	  /* Now that we have activated any function-specific attributes
	     that might affect layout, particularly vector modes, relayout
	     each of the parameters and the result.  */
	  relayout_decl (result);
	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
	       parm = DECL_CHAIN (parm))
	    relayout_decl (parm);

	  /* Similarly relayout the function decl.  */
	  targetm.target_option.relayout_function (fndecl);
	}

      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;

      if (!profile_flag && !flag_instrument_function_entry_exit)
	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    }

  /* Don't enable begin stmt markers if var-tracking at assignments is
     disabled.  The markers make little sense without the variable
     binding annotations among them.  */
  cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
				&& MAY_HAVE_DEBUG_MARKER_STMTS;
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
	      || (!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!get_last_insn ());
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_cleared_alloc<stack_usage> ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
void
push_dummy_function (bool with_decl)
{
  tree fn_decl, fn_type, fn_result_decl;

  gcc_assert (!in_dummy_function);
  in_dummy_function = true;

  if (with_decl)
    {
      fn_type = build_function_type_list (void_type_node, NULL_TREE);
      fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			    fn_type);
      fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
				   NULL_TREE, void_type_node);
      DECL_RESULT (fn_decl) = fn_result_decl;
    }
  else
    fn_decl = NULL_TREE;

  push_struct_function (fn_decl);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  push_dummy_function (false);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  /* Initialize backend, if needed.  */
  initialize_rtl ();

  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx_code_label *label = gen_label_rtx ();
  rtx x, y;
  rtx_insn *seq;

  x = expand_normal (crtl->stack_protect_guard);
  if (guard_decl)
    y = expand_normal (guard_decl);
  else
    y = const0_rtx;

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  if (targetm.have_stack_protect_test ()
      && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
    emit_insn (seq);
  else
    emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing ought
     except adding the prediction by hand.  */
  rtx_insn *tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
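
/* Conceptually the sequence emitted above behaves like the C sketch

     if (guard_on_frame != __stack_chk_guard)
       __stack_chk_fail ();

   where the comparison comes from the stack_protect_test hook (or the
   generic compare-and-jump) and the call from stack_protect_fail; the
   symbol names are the usual libssp defaults, not something this
   function hard-codes.  */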
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  tree res = DECL_RESULT (subr);
  if (aggregate_value_p (res, subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (res));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (res))
	    {
	      x = gen_rtx_MEM (DECL_MODE (res), x);
	      set_mem_attributes (x, res, 1);
	    }
	  set_parm_rtl (res, x);
	}
    }
  else if (DECL_MODE (res) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    set_parm_rtl (res, NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (res);

      /* If we may coalesce this result, make sure it has the expected mode
	 in case it was promoted.  But we need not bother about BLKmode.  */
      machine_mode promoted_mode
	= flag_tree_coalesce_vars && is_gimple_reg (res)
	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
	  : BLKmode;

      if (promoted_mode != BLKmode)
	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
      else if (TYPE_MODE (return_type) != BLKmode
	       && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      set_parm_rtl (res, gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (res) = 1;

      if (chkp_function_instrumented_p (current_function_decl))
	{
	  tree return_type = TREE_TYPE (res);
	  rtx bounds
	    = targetm.calls.chkp_function_value_bounds (return_type,
							subr, true);
	  SET_DECL_BOUNDS_RTL (res, bounds);
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain;
      rtx_insn *insn;
      int unsignedp;

      local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      set_parm_rtl (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      if (GET_MODE (local) != GET_MODE (chain))
	{
	  convert_move (local, chain, unsignedp);
	  insn = get_last_insn ();
	}
      else
	insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_dst_reg_note (insn, REG_EQUIV, chain, local);

      /* If we aren't optimizing, save the static chain onto the stack.  */
      if (!optimize)
	{
	  tree saved_static_chain_decl
	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
			  DECL_NAME (parm), TREE_TYPE (parm));
	  rtx saved_static_chain_rtx
	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
	  emit_move_insn (saved_static_chain_rtx, chain);
	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
	}
    }

  /* The following was moved from init_function_start.
     The move was supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
void
pop_dummy_function (void)
{
  pop_cfun ();
  in_dummy_function = false;
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_dummy_function ();
}
/* Helper for diddle_return_value.  */

static void
diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
{
  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  diddle_return_value_1 (doit, arg, crtl->return_bnd);
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Set the location of the insn chain starting at INSN to LOC.  */

static void
set_insn_locations (rtx_insn *insn, int loc)
{
  while (insn != NULL)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx_insn *insn, *seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locations (seq, prologue_location);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depended on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  rtx_insn *clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
	emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;
	  complex_mode cmode;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && REG_P (real_decl_rtl)
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if ((cfun->returns_struct || cfun->returns_pcc_struct)
      && !targetm.calls.omit_struct_return_reg)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
      value_address = convert_memory_address (mode, value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      start_sequence ();
      clobber_return_register ();
      rtx_insn *seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (copy_rtx (ret)),
		      crtl->args.internal_arg_pointer);
      rtx_insn *seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* If debugging dumps are requested, dump information about how the
   target handled -fstack-check=clash for the prologue.

   PROBES describes what if any probes were emitted.

   RESIDUALS indicates if the prologue had any residual allocation
   (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */

void
dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
{
  if (!dump_file)
    return;

  switch (probes)
    {
    case NO_PROBE_NO_FRAME:
      fprintf (dump_file,
	       "Stack clash no probe no stack adjustment in prologue.\n");
      break;
    case NO_PROBE_SMALL_FRAME:
      fprintf (dump_file,
	       "Stack clash no probe small stack adjustment in prologue.\n");
      break;
    case PROBE_INLINE:
      fprintf (dump_file, "Stack clash inline probes in prologue.\n");
      break;
    case PROBE_LOOP:
      fprintf (dump_file, "Stack clash probe loop in prologue.\n");
      break;
    }

  if (residuals)
    fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
  else
    fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");

  if (frame_pointer_needed)
    fprintf (dump_file, "Stack clash frame pointer needed.\n");
  else
    fprintf (dump_file, "Stack clash no frame pointer needed.\n");

  if (TREE_THIS_VOLATILE (cfun->decl))
    fprintf (dump_file,
	     "Stack clash noreturn prologue, assuming no implicit"
	     " probes in caller.\n");
  else
    fprintf (dump_file,
	     "Stack clash not noreturn prologue.\n");
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
{
  rtx_insn *tmp;
  hash_table<insn_cache_hasher> *hash = *hashp;

  if (hash == NULL)
    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      rtx *slot = hash->find_slot (tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated or replaced, as COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  hash_table<insn_cache_hasher> *hash;
  rtx *slot;

  hash = epilogue_insn_hash;
  if (!hash || !hash->find (insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !hash->find (insn))
	return;
    }

  slot = hash->find_slot (copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i;
      for (i = seq->len () - 1; i >= 0; i--)
	if (hash->find (seq->element (i)))
	  return true;
      return false;
    }

  return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
}

bool
prologue_contains (const rtx_insn *insn)
{
  return contains (insn, prologue_insn_hash);
}

bool
epilogue_contains (const rtx_insn *insn)
{
  return contains (insn, epilogue_insn_hash);
}

bool
prologue_epilogue_contains (const rtx_insn *insn)
{
  if (contains (insn, prologue_insn_hash))
    return true;
  if (contains (insn, epilogue_insn_hash))
    return true;
  return false;
}

void
record_prologue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &prologue_insn_hash);
}

void
record_epilogue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &epilogue_insn_hash);
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx_insn *returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
/* Return a sequence to be used as the split prologue for the current
   function, or NULL.  */

static rtx_insn *
make_split_prologue_seq (void)
{
  if (!flag_split_stack
      || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
    return NULL;

  start_sequence ();
  emit_insn (targetm.gen_split_stack_prologue ());
  rtx_insn *seq = get_insns ();
  end_sequence ();

  record_insns (seq, NULL, &prologue_insn_hash);
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the prologue for the current function,
   or NULL.  */

static rtx_insn *
make_prologue_seq (void)
{
  if (!targetm.have_prologue ())
    return NULL;

  start_sequence ();
  rtx_insn *seq = targetm.gen_prologue ();
  emit_insn (seq);

  /* Insert an explicit USE for the frame pointer
     if the profiling is on and the frame pointer is required.  */
  if (crtl->profile && frame_pointer_needed)
    emit_use (hard_frame_pointer_rtx);

  /* Retain a map of the prologue insns.  */
  record_insns (seq, NULL, &prologue_insn_hash);
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Ensure that instructions are not moved into the prologue when
     profiling is on.  The call to the profiling routine can be
     emitted within the live range of a call-clobbered register.  */
  if (!targetm.profile_before_prologue () && crtl->profile)
    emit_insn (gen_blockage ());

  seq = get_insns ();
  end_sequence ();
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the epilogue for the current function,
   or NULL.  */

static rtx_insn *
make_epilogue_seq (void)
{
  if (!targetm.have_epilogue ())
    return NULL;

  start_sequence ();
  emit_note (NOTE_INSN_EPILOGUE_BEG);
  rtx_insn *seq = targetm.gen_epilogue ();
  if (seq)
    emit_jump_insn (seq);

  /* Retain a map of the epilogue insns.  */
  record_insns (seq, NULL, &epilogue_insn_hash);
  set_insn_locations (seq, epilogue_location);

  seq = get_insns ();
  rtx_insn *returnjump = get_last_insn ();
  end_sequence ();

  if (JUMP_P (returnjump))
    set_return_jump_label (returnjump);

  return seq;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
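
/* As an illustrative example: in a CFG like

       entry -> A -> exit              (early return, touches no
					call-saved state)
		 \
		  -> B -> C -> exit    (B writes a call-saved register)

   only B and C require the prologue, so it is inserted on the single
   edge A->B, and the early-return path through A pays no save/restore
   cost.  */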
5998 thread_prologue_and_epilogue_insns (void)
6002 /* Can't deal with multiple successors of the entry block at the
6003 moment. Function should always have at least one entry
6005 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
6007 edge entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
6008 edge orig_entry_edge
= entry_edge
;
6010 rtx_insn
*split_prologue_seq
= make_split_prologue_seq ();
6011 rtx_insn
*prologue_seq
= make_prologue_seq ();
6012 rtx_insn
*epilogue_seq
= make_epilogue_seq ();
  /* Try to perform a kind of shrink-wrapping, making sure the
     prologue/epilogue is emitted only around those parts of the
     function that require it.  */
  try_shrink_wrapping (&entry_edge, prologue_seq);

  /* If the target can handle splitting the prologue/epilogue into separate
     components, try to shrink-wrap these components separately.  */
  try_shrink_wrapping_separate (entry_edge->dest);

  /* If that did anything for any component we now need to generate the
     "main" prologue again.  Because some targets require some of these
     to be called in a specific order (i386 requires the split prologue
     to be first, for example), we create all three sequences again here.
     If this does not work for some target, that target should not enable
     separate shrink-wrapping.  */
  if (crtl->shrink_wrapped_separate)
    {
      split_prologue_seq = make_split_prologue_seq ();
      prologue_seq = make_prologue_seq ();
      epilogue_seq = make_epilogue_seq ();
    }
  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      rtx_insn *prev, *last, *trial;

      if (e->flags & EDGE_FALLTHRU)
	continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
	continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
	continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
  edge exit_fallthru_edge
    = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);

  if (exit_fallthru_edge)
    {
      if (epilogue_seq)
	{
	  insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
	  commit_edge_insertions ();

	  /* The epilogue insns we inserted may cause the exit edge to no
	     longer be fallthru.  */
	  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	    {
	      if (((e->flags & EDGE_FALLTHRU) != 0)
		  && returnjump_p (BB_END (e->src)))
		e->flags &= ~EDGE_FALLTHRU;
	    }
	}
      else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
	{
	  /* We have a fall-through edge to the exit block, the source is not
	     at the end of the function, and there will be an assembler
	     epilogue at the end of the function.
	     We can't use force_nonfallthru here, because that would try to
	     use return.  Inserting a jump 'by hand' is extremely messy, so
	     we take advantage of cfg_layout_finalize using
	     fixup_fallthru_exit_predecessor.  */
	  cfg_layout_initialize (0);
	  basic_block cur_bb;
	  FOR_EACH_BB_FN (cur_bb, cfun)
	    if (cur_bb->index >= NUM_FIXED_BLOCKS
		&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	      cur_bb->aux = cur_bb->next_bb;
	  cfg_layout_finalize ();
	}
    }
  /* Insert the prologue.  */

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (split_prologue_seq || prologue_seq)
    {
      rtx_insn *split_prologue_insn = split_prologue_seq;
      if (split_prologue_seq)
	{
	  while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
	    split_prologue_insn = NEXT_INSN (split_prologue_insn);
	  insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
	}

      rtx_insn *prologue_insn = prologue_seq;
      if (prologue_seq)
	{
	  while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
	    prologue_insn = NEXT_INSN (prologue_insn);
	  insert_insn_on_edge (prologue_seq, entry_edge);
	}

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      if (split_prologue_insn
	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
	split_prologue_insn = NULL;
      if (prologue_insn
	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
	prologue_insn = NULL;
      if (split_prologue_insn || prologue_insn)
	{
	  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
	  bitmap_clear (blocks);
	  if (split_prologue_insn)
	    bitmap_set_bit (blocks,
			    BLOCK_FOR_INSN (split_prologue_insn)->index);
	  if (prologue_insn)
	    bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
	  find_many_sub_basic_blocks (blocks);
	}
    }

  default_rtl_profile ();
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei));
       ei_next (&ei))
    {
      /* Skip those already handled, the ones that run without prologue.  */
      if (e->flags & EDGE_IGNORE)
	{
	  e->flags &= ~EDGE_IGNORE;
	  continue;
	}

      rtx_insn *insn = BB_END (e->src);

      if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
	continue;

      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
	{
	  start_sequence ();
	  emit_note (NOTE_INSN_EPILOGUE_BEG);
	  emit_insn (ep_seq);
	  rtx_insn *seq = get_insns ();
	  end_sequence ();

	  /* Retain a map of the epilogue insns.  Used in life analysis to
	     avoid getting rid of sibcall epilogue insns.  Do this before we
	     actually emit the sequence.  */
	  record_insns (seq, NULL, &epilogue_insn_hash);
	  set_insn_locations (seq, epilogue_location);

	  emit_insn_before (seq, insn);
	}
    }
  if (epilogue_seq)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_seq; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
	}
    }

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

static void
reposition_prologue_and_epilogue_notes (void)
{
  if (!targetm.have_prologue ()
      && !targetm.have_epilogue ()
      && !targetm.have_sibcall_epilogue ())
    return;
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }
  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  rtx_insn *insn, *first = NULL, *note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue
	     insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
}
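
/* A hypothetical sketch of why the repositioning above is needed: after
   scheduling, the insn stream of the first block can look like

     (insn 12 ...)                  <- body insn scheduled upward
     (note NOTE_INSN_PROLOGUE_END)
     (insn 10 ...) frame-related    <- prologue insn scheduled downward

   so the note no longer separates prologue insns from the function body.
   The code above moves the note just after the last insn recorded in
   prologue_insn_hash (and does the analogous fixup for
   NOTE_INSN_EPILOGUE_BEG), keeping unwind and debug info consistent.  */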
/* Returns the name of function declared by FNDECL.  */

const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 1);
}

/* Returns the name of function FN.  */

const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */

const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
	func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	/* So this might be a type referenced by a global variable.
	   Record that type so that we can later decide to emit its
	   debug information.  */
	vec_safe_push (types_used_by_cur_var_decl, t);
    }
}
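
/* An illustrative, hypothetical example: for a declaration such as

     struct s **a[10];

   the type of A reaches this function as an ARRAY_TYPE of POINTER_TYPEs.
   The loop above peels those wrappers (stopping early if a wrapper carries
   its own TYPE_NAME, e.g. a typedef), so what gets recorded is the main
   variant of struct s rather than the array-of-pointers type.  */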
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
			 types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash
	  = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc<types_used_by_vars_entry> ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
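
/* Usage sketch (hedged; the real call sites live in the front ends and
   middle end): for a global variable VAR whose type should be kept for
   debug output, a caller would do something like

     types_used_by_var_decl_insert (TREE_TYPE (var), var);

   Inserting the same (type, decl) pair twice is harmless: find_slot with
   INSERT then returns a slot whose entry already exists, and the code
   above only allocates a new entry while *SLOT is still NULL.  */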
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  /* prepare_shrink_wrap is sensitive to the block structure of the control
     flow graph, so clean it up first.  */
  if (optimize)
    cleanup_cfg (0);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     so disable that.  */
  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     int inout;
     asm ("": "+mr" (inout));

   which is transformed very early to use explicit output and match operands:

     asm ("": "=mr" (inout) : "0" (inout));

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
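
/* A concrete user-level example (hypothetical, mirroring the sketch
   above): source code such as

     int x = 1;
     asm ("incl %0" : "+mr" (x));

   arrives here with the "+mr" constraint already split into an "=mr"
   output plus a matching "0" input.  Once SSA renaming has given the two
   operands distinct pseudos, this pass emits a move from the input pseudo
   to the output pseudo and rewrites the asm to use the output pseudo in
   both places, restoring the match.  */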
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (VAR_P (d));
  vec_safe_push (fun->local_decls, d);
}
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints
unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  rtx pat, *p_sets;
	  int noutputs;

	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}

#include "gt-function.h"