/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "rtl-error.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-pass.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "tree-chkp.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
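
/* Worked example (illustrative, not in the original source): with
   ALIGN = 16,

     FLOOR_ROUND (37, 16) == (37 & ~15)        == 32
     CEIL_ROUND  (37, 16) == ((37 + 15) & ~15) == 48
     CEIL_ROUND (-37, 16) == -32

   The bit masking stays well defined for negative VALUEs, where
   division-based rounding would depend on the sign of the dividend.  */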
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};
hash_table<insn_cache_hasher> *prologue_insn_hash;
hash_table<insn_cache_hasher> *epilogue_insn_hash;

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
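
/* Worked example (illustrative, not in the original source): on an
   upward-growing frame with frame_phase == 0, a request of size 8 and
   alignment 8 against a free area starting at offset 20 yields
   this_frame_offset == CEIL_ROUND (20, 8) == 24; if that area ended
   exactly at frame_offset, the frame is extended so the slot
   [24, 32) fits.  */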
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
                           &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with last parameter as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
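
/* Usage sketch (illustrative, not in the original source) of the ALIGN
   convention documented above:

     assign_stack_local (DImode, 8, 0);       per GET_MODE_ALIGNMENT (DImode)
     assign_stack_local (BLKmode, len, -1);   BIGGEST_ALIGNMENT, LEN rounded
     assign_stack_local (BLKmode, len, -2);   byte alignment
     assign_stack_local (BLKmode, len, 128);  128-bit boundary

   Each call returns a MEM whose address is based on
   virtual_stack_vars_rtx until virtual register instantiation runs.  */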
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */
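
/* Example (illustrative, not in the original source): in

     x = ({ struct S s = foo (); s.field; });

   the temporary holding foo ()'s result is allocated at the nesting
   level of the statement expression; preserve_temp_slots moves it to
   temp_slot_level - 1 so it is not freed until the statement
   containing the ({...}) grouping completes.  */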
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));

  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}
/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          if (INTVAL (XEXP (x, 1)) >= p->base_offset
              && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
            return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
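
/* Worked example (illustrative, not in the original source): two free
   BLKmode slots P and Q with P->base_offset == 0, P->full_size == 16
   and Q->base_offset == 16, Q->full_size == 8 are adjacent
   (0 + 16 == 16), so Q is merged into P, leaving a single 24-byte
   area that can satisfy a larger later request.  */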
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};

/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}
static unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
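
/* Example (illustrative, not in the original source): once the offsets
   are computed, an address such as
   (plus virtual_stack_vars_rtx (const_int 12)) is rewritten as
   (plus frame_pointer_rtx (const_int (var_offset + 12))); the other
   virtual registers map to arg_pointer_rtx and stack_pointer_rtx with
   their respective offsets.  */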
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE \
                                        : TREE_TYPE (FNDECL))) ? 0 \
        : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
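
/* Example (illustrative, not in the original source): on a target that
   accumulates outgoing arguments and reserves no register-parameter
   area, dynamic stack objects start at
   crtl->outgoing_args_size + STACK_POINTER_OFFSET bytes from the
   stack pointer; without accumulation they start at
   STACK_POINTER_OFFSET.  */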
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
        {
          rtx new_rtx;
          HOST_WIDE_INT offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
                {
                  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
                  changed = true;
                }
              iter.skip_subrtxes ();
              break;

            case PLUS:
              new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
              if (new_rtx)
                {
                  XEXP (x, 0) = new_rtx;
                  *loc = plus_constant (GET_MODE (x), x, offset, true);
                  changed = true;
                  iter.skip_subrtxes ();
                }

              /* FIXME -- from old code */
              /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
                 we can commute the PLUS and SUBREG because pointers into the
                 frame are well-behaved.  */
              break;

            default:
              break;
            }
        }
    }
  return changed;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          instantiate_virtual_regs_in_rtx (&SET_SRC (set));
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   gen_int_mode (-offset, GET_MODE (new_rtx)));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
                                   gen_int_mode (offset,
                                                 GET_MODE (SET_DEST (set))),
                                   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);

            if (!instantiate_virtual_regs_in_rtx (&addr))
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr, true);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr, true);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       gen_int_mode (offset, GET_MODE (x)),
                                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop
                (GET_MODE (new_rtx), PLUS, new_rtx,
                 gen_int_mode (offset, GET_MODE (new_rtx)),
                 NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          /* For asm goto, instead of fixing up all the edges
             just clear the template and clear input operands
             (asm goto doesn't have any output operands).  */
          if (JUMP_P (insn))
            {
              rtx asm_op = extract_asm_operands (PATTERN (insn));
              ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
              ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
              ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
            }
          else
            delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((TREE_CODE (t) == VAR_DECL
               || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process the saved static chain if it exists.  */
  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;
        else if (DEBUG_INSN_P (insn))
          instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
        else
          instantiate_virtual_regs_in_insn (insn);

        if (insn->deleted ())
          continue;

        instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          if (fndecl)
            fntype = TREE_TYPE (fndecl);
          else if (CALL_EXPR_FN (fntype))
            fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
          else
            /* For internal functions, assume nothing needs to be
               returned in memory.  */
            return 0;
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
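
/* Example (illustrative, target-dependent, not in the original
   source): on a typical 64-bit target, aggregate_value_p returns 0
   for struct { long x; } (it fits a call-clobbered return register)
   but 1 for any TREE_ADDRESSABLE type or whenever
   targetm.calls.return_in_memory says so; such values are returned
   through a hidden address argument.  */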
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* We often try to use the SSA_NAME, instead of its underlying
	 decl, to get type information and guide decisions, to avoid
	 differences of behavior between anonymous and named
	 variables, but in this one case we have to go for the actual
	 variable if there is one.  The main reason is that, at least
	 at -O0, we want to place user variables on the stack, but we
	 don't mind using pseudos for anonymous or ignored temps.
	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
	 should go in pseudos, whereas their corresponding variables
	 might have to go on the stack.  So, disregarding the decl
	 here would negatively impact debug info at -O0, enable
	 coalescing between SSA_NAMEs that ought to get different
	 stack/pseudo assignments, and get the incoming argument
	 processing thoroughly confused by PARM_DECLs expected to live
	 in stack slots but assigned to pseudos.  */
      if (!SSA_NAME_VAR (decl))
	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));

      decl = SSA_NAME_VAR (decl);
    }

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* RESULT_DECLs are a bit special in that they're assigned without
     regard to use_register_for_decl, but we generally only store in
     them.  If we coalesce their SSA NAMEs, we'd better return a
     result that matches the assignment in expand_function_start.  */
  if (TREE_CODE (decl) == RESULT_DECL)
    {
      /* If it's not an aggregate, we're going to use a REG or a
	 PARALLEL containing a REG.  */
      if (!aggregate_value_p (decl, current_function_decl))
	return true;

      /* If expand_function_start determines the return value, we'll
	 use MEM if it's not by reference.  */
      if (cfun->returns_pcc_struct
	  || (targetm.calls.struct_value_rtx
	      (TREE_TYPE (current_function_decl), 1)))
	return DECL_BY_REFERENCE (decl);

      /* Otherwise, we're taking an extra all.function_result_decl
	 argument.  It's set up in assign_parms_augmented_arg_list,
	 under the (negated) conditions above, and then it's used to
	 set up the RESULT_DECL rtl in assign_parms, after looping
	 over all parameters.  Now, if the RESULT_DECL is not by
	 reference, we'll use a MEM either way.  */
      if (!DECL_BY_REFERENCE (decl))
	return false;

      /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
	 the function_result_decl's assignment.  Since it's a pointer,
	 we can short-circuit a number of the tests below, and we must
	 duplicate them because we don't have the
	 function_result_decl to test.  */
      if (!targetm.calls.allocate_stack_slots_for_args ())
	return true;
      /* We don't set DECL_IGNORED_P for the function_result_decl.  */
      if (optimize)
	return true;
      /* We don't set DECL_REGISTER for the function_result_decl.  */
      return false;
    }

  /* Decl is implicitly addressable by bound stores and loads
     if it is an aggregate holding bounds.  */
  if (chkp_function_instrumented_p (current_function_decl)
      && TREE_TYPE (decl)
      && !BOUNDED_P (decl)
      && chkp_type_has_pointer (TREE_TYPE (decl)))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
	 types containing methods, otherwise the methods won't be callable
	 from the debugger.  */
      if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
	return false;
      break;
    default:
      break;
    }

  return true;
}
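/* Worked example (illustrative, not from the original sources):

     int
     f (int x)
     {
       int y = x + 1;
       return y;
     }

   At -O0 the user variables X and Y fail the DECL_IGNORED_P and
   optimize tests above, so both get stack slots and stay visible to
   the debugger, while the anonymous temporaries created when
   expanding "x + 1" are DECL_IGNORED_P and are given pseudos.  */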
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};

struct bounds_parm_data
{
  assign_parm_data_one parm_data;
  tree bounds_parm;
  tree ptr_parm;
  rtx ptr_entry;
  int bound_no;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split each entry into two
   entries of the component type.  The vector is modified in place.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;
      /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
	 changes, the end of the RESULT_DECL handling block in
	 use_register_for_decl must be adjusted to match.  */

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;

      /* If the function is instrumented, then the bounds of the
	 passed structure address are the second argument.  */
      if (chkp_function_instrumented_p (fndecl))
	{
	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			     PARM_DECL, get_identifier (".result_bnd"),
			     pointer_bounds_type_node);
	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_NAMELESS (decl) = 1;
	  TREE_CONSTANT (decl) = 1;

	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
	  DECL_CHAIN (all->orig_fnargs) = decl;
	  fnargs.safe_insert (1, decl);
	}
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
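/* Worked example (illustrative): for

     struct big { int a[8]; };
     struct big f (int x);

   on a target whose struct_value_rtx hook returns null, the augmented
   list behaves as if the source had been

     struct big *f (struct big *.result_ptr, int x);

   with ".result_ptr" being the artificial PARM_DECL built above.  The
   ".result_bnd" decl is inserted after it only when the function is
   MPX-instrumented.  */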
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
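/* Worked example (illustrative): for "void f (short s)" on a target
   whose promote_function_mode widens small integers, the fields come
   out as

     nominal_mode  == HImode   (mode of TREE_TYPE (parm))
     passed_mode   == HImode   (mode of DECL_ARG_TYPE (parm))
     promoted_mode == SImode   (mode in which the ABI passes it)

   Had S been passed by invisible reference instead, passed_type would
   have been rewritten to "short *", passed_pointer would be set, and
   all three modes would be the pointer mode.  */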
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
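/* Worked example (illustrative): with UNITS_PER_WORD == 4 and
   STACK_BYTES == 8, an argument whose first four bytes arrive in a
   register and whose remainder arrives on the stack gives

     partial                 = 4
     pretend_bytes           = 4
     all->pretend_args_size  = CEIL_ROUND (4, 8) = (4 + 7) & ~7 = 8

   so the prologue allocates a full stack-boundary unit, preserving
   the STACK_BOUNDARY alignment invariant described above.  */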
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Bounds are never passed on the stack to keep compatibility
     with not instrumented code.  */
  if (POINTER_BOUNDS_TYPE_P (data->passed_type))
    return false;
  /* Trivially true if we've no incoming register.  */
  else if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
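/* Example (illustrative): on an ABI that reserves stack space for all
   register parameters (reg_parm_stack_space > 0, as the 64-bit
   Windows calling convention does), even an argument arriving wholly
   in a register takes the last "else if" above, so its home slot is
   counted in stack_args_size and assign_parm_find_stack_rtl can find
   it later.  */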
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
						  data->promoted_mode);
	      if (offset)
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
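/* Worked example (illustrative): the alignment guess above extracts
   the largest power of two dividing the slot's bit offset.  With
   offset_rtx == GEN_INT (12) and boundary == 64,

     align = 12 * BITS_PER_UNIT | 64  =  96 | 64  =  96
     align = align & -align           =  32

   so a slot 12 bytes past the argument pointer may only be assumed
   32-bit aligned even though the argument area itself is 64-bit
   aligned.  */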
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm),
			       validize_mem (copy_rtx (stack_parm)),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
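/* Illustrative sketch (register numbers are made up): an argument
   described by the target as

     (parallel:TI [(expr_list (reg:DI 3) (const_int 0))
                   (expr_list (reg:DI 4) (const_int 8))])

   has a non-BLK overall mode, so emit_group_store above copies both
   halves into one fresh TImode pseudo and the rest of the parameter
   code can treat the value as a single register.  */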
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  rtx target_reg = NULL_RTX;
  bool in_conversion_seq = false;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we want the parameter in a pseudo, don't use a stack slot.  */
  if (is_gimple_reg (parm) && use_register_for_decl (parm))
    {
      tree def = ssa_default_def (cfun, parm);
      gcc_assert (def);
      machine_mode mode = promote_ssa_mode (def, NULL);
      rtx reg = gen_reg_rtx (mode);
      if (GET_CODE (reg) != CONCAT)
	stack_parm = reg;
      else
	{
	  target_reg = reg;
	  /* Avoid allocating a stack slot, if there isn't one
	     preallocated by the ABI.  It might seem like we should
	     always prefer a pseudo, but converting between
	     floating-point and integer modes goes through the stack
	     on various machines, so it's better to use the reserved
	     stack slot than to risk wasting it and allocating more
	     for the conversion.  */
	  if (stack_parm == NULL_RTX)
	    {
	      int save = generating_concat_p;
	      generating_concat_p = 0;
	      stack_parm = gen_reg_rtx (mode);
	      generating_concat_p = save;
	    }
	}
      data->stack_parm = NULL;
    }

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
	emit_group_store (mem, entry_parm, data->passed_type, size);
      else if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	  in_conversion_seq = true;
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

#ifdef BLOCK_REG_PADDING
	  /* Storing the register in memory as a full word, as
	     move_block_from_reg below would do, and then using the
	     MEM in a smaller mode, has the effect of shifting right
	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
	     shifting must be explicit.  */
	  else if (!MEM_P (mem))
	    {
	      rtx x;

	      /* If the assert below fails, we should have taken the
		 mode != BLKmode path above, unless we have downward
		 padding of smaller-than-word arguments on a machine
		 with little-endian bytes, which would likely require
		 additional changes to work correctly.  */
	      gcc_checking_assert (BYTES_BIG_ENDIAN
				   && (BLOCK_REG_PADDING (mode,
							  data->passed_type, 1)
				       == upward));

	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
				NULL_RTX, 1);
	      x = force_reg (word_mode, x);
	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);

	      emit_move_insn (mem, x);
	    }
#endif

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else if (!MEM_P (mem))
	{
	  gcc_checking_assert (size > UNITS_PER_WORD);
#ifdef BLOCK_REG_PADDING
	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
						  data->passed_type, 0)
			       == upward);
#endif
	  emit_move_insn (mem, entry_parm);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
      in_conversion_seq = true;
    }

  if (target_reg)
    {
      if (!in_conversion_seq)
	emit_move_insn (target_reg, stack_parm);
      else
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (target_reg, stack_parm);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}
      stack_parm = target_reg;
    }

  data->stack_parm = stack_parm;
  set_parm_rtl (parm, stack_parm);
}
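/* Worked example (illustrative): a 3-byte BLKmode argument arriving
   in a register on a 32-bit BYTES_BIG_ENDIAN target with downward
   padding takes the left-justify path above:

     by = (UNITS_PER_WORD - size) * BITS_PER_UNIT = (4 - 3) * 8 = 8

   The payload is shifted left by 8 bits so that it occupies the most
   significant bytes of the word before the full-word store, and a
   later 3-byte load from the slot's address sees it correctly.  */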
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;
  rtx rtl;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);
  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set rtl appropriately.  */
  if (data->passed_pointer)
    {
      rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (rtl, parm, 1);
    }
  else
    rtl = parmreg;

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (copy_rtx (data->entry_parm));

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx_insn *insn, *insns;
	  rtx t = op1;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
					   data->passed_mode, unsignedp);
	  emit_insn (pat);
	  insns = get_insns ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (PATTERN (insn), record_hard_reg_sets,
			     &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  end_sequence ();

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      SET_DECL_RTL (parm, rtl);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      SET_DECL_RTL (parm, NULL_RTX);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
	 the debug info in case it is not legitimate.  */
      if (GET_MODE (parmreg) != GET_MODE (rtl))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, rtl);
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
			  tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);

      rtl = parmreg;

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  set_parm_rtl (parm, rtl);

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx_insn *linsn = get_last_insn ();
      rtx_insn *sinsn;
      rtx set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
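/* Worked example (illustrative): for a QImode parameter promoted to
   SImode by the ABI, the fallback conversion above can produce

     (subreg:QI (reg:SI 90) 0)

   with SUBREG_PROMOTED_VAR_P set and the signedness recorded via
   SUBREG_PROMOTED_SET, telling later passes that the high bits of
   pseudo 90 already hold a valid extension and need not be extended
   again.  (Register number 90 is made up.)  */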
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  int offset = subreg_lowpart_offset (data->nominal_mode,
					      GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	{
	  if (!REG_P (src))
	    src = force_reg (GET_MODE (src), src);
	  emit_move_insn (dest, src);
	}
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  set_parm_rtl (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_parm_rtl (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}
/* Load bounds of PARM from bounds table.  */

static void
assign_parm_load_bounds (struct assign_parm_data_one *data,
			 tree parm,
			 rtx entry,
			 unsigned bound_no)
{
  bitmap_iterator bi;
  unsigned i, offs = 0;
  int bnd_no = -1;
  rtx slot = NULL, ptr = NULL;

  if (parm)
    {
      bitmap slots;
      bitmap_obstack_initialize (NULL);
      slots = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (TREE_TYPE (parm), slots);
      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
	{
	  if (bound_no)
	    bound_no--;
	  else
	    {
	      bnd_no = i;
	      break;
	    }
	}
      BITMAP_FREE (slots);
      bitmap_obstack_release (NULL);
    }

  /* We may have bounds not associated with any pointer.  */
  if (bnd_no != -1)
    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;

  /* Find associated pointer.  */
  if (bnd_no == -1)
    {
      /* If bounds are not associated with any pointer,
	 then they are passed in a register or special slot.  */
      gcc_assert (data->entry_parm);
      ptr = const0_rtx;
    }
  else if (MEM_P (entry))
    slot = adjust_address (entry, Pmode, offs);
  else if (REG_P (entry))
    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
  else if (GET_CODE (entry) == PARALLEL)
    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
  else
    gcc_unreachable ();
  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
							data->entry_parm);
}
/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */

static void
assign_bounds (vec<bounds_parm_data> &bndargs,
	       struct assign_parm_data_all &all,
	       bool assign_regs, bool assign_special,
	       bool assign_bt)
{
  unsigned i, pass;
  bounds_parm_data *pbdata;

  if (!bndargs.exists ())
    return;

  /* We make several passes to store input bounds.  First we handle
     bounds passed in registers.  After that we load bounds passed
     in special slots.  Finally we load bounds from the Bounds
     Table.  */
  for (pass = 0; pass < 3; pass++)
    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
      {
	/* Pass 0 => regs only.  */
	if (pass == 0
	    && (!assign_regs
		|| (!pbdata->parm_data.entry_parm
		    || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
	  continue;
	/* Pass 1 => slots only.  */
	else if (pass == 1
		 && (!assign_special
		     || (!pbdata->parm_data.entry_parm
			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
	  continue;
	/* Pass 2 => BT only.  */
	else if (pass == 2
		 && (!assign_bt
		     || pbdata->parm_data.entry_parm))
	  continue;

	if (!pbdata->parm_data.entry_parm
	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
				   pbdata->ptr_entry, pbdata->bound_no);

	set_decl_incoming_rtl (pbdata->bounds_parm,
			       pbdata->parm_data.entry_parm, false);

	if (assign_parm_setup_block_p (&pbdata->parm_data))
	  assign_parm_setup_block (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
	else if (pbdata->parm_data.passed_pointer
		 || use_register_for_decl (pbdata->bounds_parm))
	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
				 &pbdata->parm_data);
	else
	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
      }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i, bound_no = 0;
  tree last_arg = NULL;
  rtx last_arg_entry = NULL;
  vec<bounds_parm_data> bndargs = vNULL;
  bounds_parm_data bdata;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}
      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Remember where the last non-bounds arg was passed in case
	     we have to load associated bounds for it from the Bounds
	     Table.  */
	  last_arg = parm;
	  last_arg_entry = data.entry_parm;
	  bound_no = 0;
	}
      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      assign_parm_adjust_stack_rtl (&data);

      /* Bounds should be loaded in the particular order to
	 have registers allocated correctly.  Collect info about
	 input bounds and load them later.  */
      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Expect bounds in instrumented functions only.  */
	  gcc_assert (chkp_function_instrumented_p (fndecl));

	  bdata.parm_data = data;
	  bdata.bounds_parm = parm;
	  bdata.ptr_parm = last_arg;
	  bdata.ptr_entry = last_arg_entry;
	  bdata.bound_no = bound_no;
	  bndargs.safe_push (bdata);
	}
      else
	{
	  if (assign_parm_setup_block_p (&data))
	    assign_parm_setup_block (&all, parm, &data);
	  else if (data.passed_pointer || use_register_for_decl (parm))
	    assign_parm_setup_reg (&all, parm, &data);
	  else
	    assign_parm_setup_stack (&all, parm, &data);
	}

      if (cfun->stdarg && !DECL_CHAIN (parm))
	{
	  int pretend_bytes = 0;

	  assign_parms_setup_varargs (&all, &data, false);

	  if (chkp_function_instrumented_p (fndecl))
	    {
	      /* We expect this is the last parm.  Otherwise it is wrong
		 to assign bounds right now.  */
	      gcc_assert (i == (fnargs.length () - 1));
	      assign_bounds (bndargs, all, true, false, false);
	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
							  data.promoted_mode,
							  data.passed_type,
							  &pretend_bytes,
							  false);
	      assign_bounds (bndargs, all, false, true, true);
	      bndargs.release ();
	    }
	}

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	bound_no++;
    }

  assign_bounds (bndargs, all, true, true, true);
  bndargs.release ();

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      set_parm_rtl (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
	= (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
	   : expand_expr (size_diffop (all.stack_args_size.var,
				       size_int (-all.stack_args_size.constant)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h functions, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  if (chkp_function_instrumented_p (fndecl))
	    crtl->return_bnd
	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
							  fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARMs.  Keep the parms address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
				       size_int (DECL_ALIGN (parm)));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
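
/* Illustrative sketch (not part of the original sources): at the source
   level, the callee-copy path above behaves roughly as if a by-reference
   aggregate parameter were rewritten as below.  The names `f', `S' and
   `s_copy' are hypothetical.  */
#if 0
void f (struct S s)		/* ABI actually passes &s.  */
{
  struct S s_copy = s;		/* gimplify_parameters emits this copy,  */
  /* ... and every later use of `s' resolves to `s_copy' through the
     SET_DECL_VALUE_EXPR / DECL_HAS_VALUE_EXPR_P machinery.  */
}
#endif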
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

  if (ARGS_GROW_DOWNWARD)
    {
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					      initial_offset_ptr->var);

      {
	tree s2 = sizetree;
	if (where_pad != none
	    && (!tree_fits_uhwi_p (sizetree)
		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
	SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (&locate->slot_offset, boundary,
			      &locate->alignment_pad);

      locate->size.constant = (-initial_offset_ptr->constant
			       - locate->slot_offset.constant);

      if (initial_offset_ptr->var)
	locate->size.var = size_binop (MINUS_EXPR,
				       size_binop (MINUS_EXPR,
						   ssize_int (0),
						   initial_offset_ptr->var),
				       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
	 below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == downward)
	pad_below (&locate->offset, passed_mode, sizetree);
    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (initial_offset_ptr, boundary,
			      &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
	 so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == downward)
	pad_below (&locate->offset, passed_mode, sizetree);

      if (where_pad != none
	  && (!tree_fits_uhwi_p (sizetree)
	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

      ADD_PARM_SIZE (locate->size, sizetree);

      locate->size.constant -= part_size_in_regs;
    }

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
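
/* Worked example (illustrative only, not from the original sources):
   with BITS_PER_UNIT == 8, round_boundary == 32 bits and a 10-byte
   BLKmode argument, the size rounding above grows the slot while the
   offset is only moved by the alignment padding:

     sizetree       = 10 bytes
     round_boundary = 32 bits -> 4 bytes
     rounded size   = CEIL_ROUND (10, 4) = 12 bytes

   so LOCATE->SIZE reports 12 even though only 10 bytes carry data.  */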
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
	  tree rounded;

	  if (ARGS_GROW_DOWNWARD)
	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
	  else
	    rounded = round_up (offset, boundary / BITS_PER_UNIT);

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
	    (ARGS_GROW_DOWNWARD
	     ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
	     : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));

	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
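
/* Quick sanity sketch for the rounding above (illustrative only; it
   mirrors the FLOOR_ROUND/CEIL_ROUND macro definitions at the top of
   this file).  With a 16-byte boundary and a byte offset of 20:

     CEIL_ROUND  (20, 16) == (20 + 15) & ~15 == 32
     FLOOR_ROUND (20, 16) ==  20       & ~15 == 16

   i.e. upward rounding when arguments grow upward, downward rounding
   (toward more negative offsets) when ARGS_GROW_DOWNWARD.  */
#if 0
  gcc_assert (CEIL_ROUND (20, 16) == 32);
  gcc_assert (FLOOR_ROUND (20, 16) == 16);
#endif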
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode,
	   tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after flow analysis and before register
   allocation, since register allocation will map the pseudo-regs to
   hard regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
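
/* The reversal above is the classic in-place singly-linked-list
   reversal.  A minimal stand-alone sketch of the same technique on a
   plain list (the type `node' is hypothetical, not part of this
   file):  */
#if 0
struct node { struct node *next; };

static struct node *
nreverse (struct node *t)
{
  struct node *prev = 0, *next;
  for (; t; t = next)
    {
      next = t->next;		/* Save the rest of the chain.  */
      t->next = prev;		/* Point this node backwards.  */
      prev = t;			/* Advance the new head.  */
    }
  return prev;			/* Old tail is the new head.  */
}
#endif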
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx_insn *insns, tree current_block,
		  vec<tree> *p_block_stack)
{
  rtx_insn *insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      if (prev_end)
		BLOCK_SAME_RANGE (prev_end) = 0;
	      prev_end = NULL_TREE;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_SAME_RANGE (new_block) = 0;
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      if (prev_beg == current_block && prev_beg)
		BLOCK_SAME_RANGE (block) = 1;

	      prev_beg = origin;

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  tree super;
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
								      (origin))
				   == current_block);
		  if (p_block_stack->is_empty ())
		    super = current_block;
		  else
		    {
		      super = p_block_stack->last ();
		      gcc_assert (super == current_block
				  || BLOCK_FRAGMENT_ORIGIN (super)
				     == current_block);
		    }
		  BLOCK_SUPERCONTEXT (block) = super;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      p_block_stack->safe_push (block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = p_block_stack->pop ();
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
	      prev_beg = NULL_TREE;
	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
			 ? NOTE_BLOCK (insn) : NULL_TREE;
	    }
	}
      else if (!DEBUG_INSN_P (insn))
	{
	  prev_beg = NULL_TREE;
	  if (prev_end)
	    BLOCK_SAME_RANGE (prev_end) = 0;
	  prev_end = NULL_TREE;
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}

static GTY(()) int next_block_index = 2;
/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      if (opts != optimization_default_node)
	{
	  init_tree_optimization_optabs (opts);
	  if (TREE_OPTIMIZATION_OPTABS (opts))
	    this_fn_optabs = (struct target_optabs *)
	      TREE_OPTIMIZATION_OPTABS (opts);
	}
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
      redirect_edge_var_map_empty ();
    }
}

/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function *> cfun_stack;
/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}
4839 struct function
*new_cfun
= cfun_stack
.pop ();
4840 /* When in_dummy_function, we do have a cfun but current_function_decl is
4841 NULL. We also allow pushing NULL cfun and subsequently changing
4842 current_function_decl to something else and have both restored by
4844 gcc_checking_assert (in_dummy_function
4846 || current_function_decl
== cfun
->decl
);
4847 set_cfun (new_cfun
);
4848 current_function_decl
= new_cfun
? new_cfun
->decl
: NULL_TREE
;
4851 /* Return value of funcdef and increase it. */
4853 get_next_funcdef_no (void)
4855 return funcdef_no
++;
4858 /* Return value of funcdef. */
4860 get_last_funcdef_no (void)
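
/* Typical usage pattern for the push_cfun/pop_cfun pair above
   (illustrative sketch; the function `process_one_function' is
   hypothetical):  */
#if 0
static void
process_one_function (struct function *fun)
{
  push_cfun (fun);	/* Saves cfun, sets cfun/current_function_decl.  */
  /* ... work that relies on cfun ...  */
  pop_cfun ();		/* Restores the previous cfun.  */
}
#endif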
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_cleared_alloc<function> ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);

      if (!abstract_p)
	{
	  /* Now that we have activated any function-specific attributes
	     that might affect layout, particularly vector modes, relayout
	     each of the parameters and the result.  */
	  relayout_decl (result);
	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
	       parm = DECL_CHAIN (parm))
	    relayout_decl (parm);

	  /* Similarly relayout the function decl.  */
	  targetm.target_option.relayout_function (fndecl);
	}

      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;

      if (!profile_flag && !flag_instrument_function_entry_exit)
	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
	      || (!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!get_last_insn ());
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_cleared_alloc<stack_usage> ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
void
push_dummy_function (bool with_decl)
{
  tree fn_decl, fn_type, fn_result_decl;

  gcc_assert (!in_dummy_function);
  in_dummy_function = true;

  if (with_decl)
    {
      fn_type = build_function_type_list (void_type_node, NULL_TREE);
      fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			    fn_type);
      fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
				   NULL_TREE, void_type_node);
      DECL_RESULT (fn_decl) = fn_result_decl;
    }
  else
    fn_decl = NULL_TREE;

  push_struct_function (fn_decl);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  push_dummy_function (false);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  /* Initialize backend, if needed.  */
  initialize_rtl ();

  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx_code_label *label = gen_label_rtx ();
  rtx x, y;
  rtx_insn *seq;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  if (targetm.have_stack_protect_test ()
      && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
    emit_insn (seq);
  else
    emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  rtx_insn *tmp = get_last_insn ();
  if (tmp && JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  tree res = DECL_RESULT (subr);
  if (aggregate_value_p (res, subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (res));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (res))
	    {
	      x = gen_rtx_MEM (DECL_MODE (res), x);
	      set_mem_attributes (x, res, 1);
	    }
	  set_parm_rtl (res, x);
	}
    }
  else if (DECL_MODE (res) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    set_parm_rtl (res, NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (res);

      /* If we may coalesce this result, make sure it has the expected mode
	 in case it was promoted.  But we need not bother about BLKmode.  */
      machine_mode promoted_mode
	= flag_tree_coalesce_vars && is_gimple_reg (res)
	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
	  : BLKmode;

      if (promoted_mode != BLKmode)
	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
      else if (TYPE_MODE (return_type) != BLKmode
	       && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      set_parm_rtl (res, gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (res) = 1;

      if (chkp_function_instrumented_p (current_function_decl))
	{
	  tree return_type = TREE_TYPE (res);
	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
								 subr, 1);
	  SET_DECL_BOUNDS_RTL (res, bounds);
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain;
      rtx_insn *insn;
      int unsignedp;

      local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      set_parm_rtl (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      if (GET_MODE (local) != GET_MODE (chain))
	{
	  convert_move (local, chain, unsignedp);
	  insn = get_last_insn ();
	}
      else
	insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_dst_reg_note (insn, REG_EQUIV, chain, local);

      /* If we aren't optimizing, save the static chain onto the stack.  */
      if (!optimize)
	{
	  tree saved_static_chain_decl
	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
			  DECL_NAME (parm), TREE_TYPE (parm));
	  rtx saved_static_chain_rtx
	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
	  emit_move_insn (saved_static_chain_rtx, chain);
	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
	}
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
void
pop_dummy_function (void)
{
  pop_cfun ();
  in_dummy_function = false;
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_dummy_function ();
}
/* Helper for diddle_return_value.  */

static void
diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
{
  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  diddle_return_value_1 (doit, arg, crtl->return_bnd);
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Set the location of the insn chain starting at INSN to LOC.  */

static void
set_insn_locations (rtx_insn *insn, int loc)
{
  while (insn != NULL)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx_insn *insn, *seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locations (seq, prologue_location);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  rtx_insn *clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
	emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && REG_P (real_decl_rtl)
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if ((cfun->returns_struct || cfun->returns_pcc_struct)
      && !targetm.calls.omit_struct_return_reg)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      start_sequence ();
      clobber_return_register ();
      rtx_insn *seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (copy_rtx (ret)),
		      crtl->args.internal_arg_pointer);
      rtx_insn *seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
{
  rtx_insn *tmp;
  hash_table<insn_cache_hasher> *hash = *hashp;

  if (hash == NULL)
    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      rtx *slot = hash->find_slot (tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated, or replaced by, COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  hash_table<insn_cache_hasher> *hash;
  rtx *slot;

  hash = epilogue_insn_hash;
  if (!hash || !hash->find (insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !hash->find (insn))
	return;
    }

  slot = hash->find_slot (copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i;
      for (i = seq->len () - 1; i >= 0; i--)
	if (hash->find (seq->element (i)))
	  return true;
      return false;
    }

  return hash->find (const_cast<rtx> (insn)) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx_insn *returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
/* Return a sequence to be used as the split prologue for the current
   function, or NULL.  */

static rtx_insn *
make_split_prologue_seq (void)
{
  if (!flag_split_stack
      || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
    return NULL;

  start_sequence ();
  emit_insn (targetm.gen_split_stack_prologue ());
  rtx_insn *seq = get_insns ();
  end_sequence ();

  record_insns (seq, NULL, &prologue_insn_hash);
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the prologue for the current function,
   or NULL.  */

static rtx_insn *
make_prologue_seq (void)
{
  if (!targetm.have_prologue ())
    return NULL;

  start_sequence ();
  rtx_insn *seq = targetm.gen_prologue ();
  emit_insn (seq);

  /* Insert an explicit USE for the frame pointer
     if the profiling is on and the frame pointer is required.  */
  if (crtl->profile && frame_pointer_needed)
    emit_use (hard_frame_pointer_rtx);

  /* Retain a map of the prologue insns.  */
  record_insns (seq, NULL, &prologue_insn_hash);
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Ensure that instructions are not moved into the prologue when
     profiling is on.  The call to the profiling routine can be
     emitted within the live range of a call-clobbered register.  */
  if (!targetm.profile_before_prologue () && crtl->profile)
    emit_insn (gen_blockage ());

  seq = get_insns ();
  end_sequence ();
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the epilogue for the current function,
   or NULL.  */

static rtx_insn *
make_epilogue_seq (void)
{
  if (!targetm.have_epilogue ())
    return NULL;

  start_sequence ();
  emit_note (NOTE_INSN_EPILOGUE_BEG);
  rtx_insn *seq = targetm.gen_epilogue ();
  if (seq)
    emit_jump_insn (seq);

  /* Retain a map of the epilogue insns.  */
  record_insns (seq, NULL, &epilogue_insn_hash);
  set_insn_locations (seq, epilogue_location);

  seq = get_insns ();
  rtx_insn *returnjump = get_last_insn ();
  end_sequence ();

  if (JUMP_P (returnjump))
    set_return_jump_label (returnjump);

  return seq;
}
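
/* All three make_*_seq functions above follow the same emit-into-a-
   detached-sequence idiom.  A minimal sketch of that pattern
   (illustrative only; `emit_something' is hypothetical):  */
#if 0
  start_sequence ();		/* Redirect emission to a fresh chain.  */
  emit_something ();
  rtx_insn *seq = get_insns ();	/* Grab the detached chain...  */
  end_sequence ();		/* ...and restore the previous one.  */
  /* SEQ can now be inserted anywhere, e.g. on a CFG edge.  */
#endif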
5864 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5865 this into place with notes indicating where the prologue ends and where
5866 the epilogue begins. Update the basic block information when possible.
5868 Notes on epilogue placement:
5869 There are several kinds of edges to the exit block:
5870 * a single fallthru edge from LAST_BB
5871 * possibly, edges from blocks containing sibcalls
5872 * possibly, fake edges from infinite loops
5874 The epilogue is always emitted on the fallthru edge from the last basic
5875 block in the function, LAST_BB, into the exit block.
5877 If LAST_BB is empty except for a label, it is the target of every
5878 other basic block in the function that ends in a return. If a
5879 target has a return or simple_return pattern (possibly with
5880 conditional variants), these basic blocks can be changed so that a
5881 return insn is emitted into them, and their target is adjusted to
5882 the real exit block.
5884 Notes on shrink wrapping: We implement a fairly conservative
5885 version of shrink-wrapping rather than the textbook one. We only
5886 generate a single prologue and a single epilogue. This is
5887 sufficient to catch a number of interesting cases involving early
5890 First, we identify the blocks that require the prologue to occur before
5891 them. These are the ones that modify a call-saved register, or reference
5892 any of the stack or frame pointer registers. To simplify things, we then
5893 mark everything reachable from these blocks as also requiring a prologue.
5894 This takes care of loops automatically, and avoids the need to examine
5895 whether MEMs reference the frame, since it is sufficient to check for
5896 occurrences of the stack or frame pointer.
5898 We then compute the set of blocks for which the need for a prologue
5899 is anticipatable (borrowing terminology from the shrink-wrapping
5900 description in Muchnick's book). These are the blocks which either
5901 require a prologue themselves, or those that have only successors
5902 where the prologue is anticipatable. The prologue needs to be
5903 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5904 is not. For the moment, we ensure that only one such edge exists.
5906 The epilogue is placed as described above, but we make a
5907 distinction between inserting return and simple_return patterns
5908 when modifying other blocks that end in a return. Blocks that end
5909 in a sibcall omit the sibcall_epilogue if the block is not in
5913 thread_prologue_and_epilogue_insns (void)
5917 /* Can't deal with multiple successors of the entry block at the
5918 moment. Function should always have at least one entry
5920 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
5922 edge entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5923 edge orig_entry_edge
= entry_edge
;
5925 rtx_insn
*split_prologue_seq
= make_split_prologue_seq ();
5926 rtx_insn
*prologue_seq
= make_prologue_seq ();
5927 rtx_insn
*epilogue_seq
= make_epilogue_seq ();
5929 /* Try to perform a kind of shrink-wrapping, making sure the
5930 prologue/epilogue is emitted only around those parts of the
5931 function that require it. */
5933 try_shrink_wrapping (&entry_edge
, prologue_seq
);
5936 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5938 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5939 this marker for the splits of EH_RETURN patterns, and nothing else
5940 uses the flag in the meantime. */
5941 epilogue_completed
= 1;
5943 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5944 some targets, these get split to a special version of the epilogue
5945 code. In order to be able to properly annotate these with unwind
5946 info, try to split them now. If we get a valid split, drop an
5947 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5950 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5952 rtx_insn
*prev
, *last
, *trial
;
5954 if (e
->flags
& EDGE_FALLTHRU
)
5956 last
= BB_END (e
->src
);
5957 if (!eh_returnjump_p (last
))
5960 prev
= PREV_INSN (last
);
5961 trial
= try_split (PATTERN (last
), last
, 1);
5965 record_insns (NEXT_INSN (prev
), NEXT_INSN (trial
), &epilogue_insn_hash
);
5966 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, prev
);
  edge exit_fallthru_edge
    = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);

  if (exit_fallthru_edge)
    {
      if (epilogue_seq)
        {
          insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
          commit_edge_insertions ();

          /* The epilogue insns we inserted may cause the exit edge to no
             longer be fallthru.  */
          FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
            if (((e->flags & EDGE_FALLTHRU) != 0)
                && returnjump_p (BB_END (e->src)))
              e->flags &= ~EDGE_FALLTHRU;
        }
      else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
        {
          /* We have a fall-through edge to the exit block, the source is not
             at the end of the function, and there will be an assembler
             epilogue at the end of the function.
             We can't use force_nonfallthru here, because that would try to
             use return.  Inserting a jump 'by hand' is extremely messy, so
             we take advantage of cfg_layout_finalize using
             fixup_fallthru_exit_predecessor.  */
          cfg_layout_initialize (0);
          basic_block cur_bb;
          FOR_EACH_BB_FN (cur_bb, cfun)
            if (cur_bb->index >= NUM_FIXED_BLOCKS
                && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
              cur_bb->aux = cur_bb->next_bb;
          cfg_layout_finalize ();
        }
    }
  /* Insert the prologue.  */

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (split_prologue_seq || prologue_seq)
    {
      if (split_prologue_seq)
        insert_insn_on_edge (split_prologue_seq, orig_entry_edge);

      if (prologue_seq)
        insert_insn_on_edge (prologue_seq, entry_edge);

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      auto_sbitmap blocks (last_basic_block_for_fn (cfun));
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
    }

  default_rtl_profile ();
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei));
       ei_next (&ei))
    {
      /* Skip those already handled, the ones that run without prologue.  */
      if (e->flags & EDGE_IGNORE)
        {
          e->flags &= ~EDGE_IGNORE;
          continue;
        }

      rtx_insn *insn = BB_END (e->src);

      if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
        continue;

      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          rtx_insn *seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before we
             actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locations (seq, epilogue_location);

          emit_insn_before (seq, insn);
        }
    }
  if (epilogue_seq)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_seq; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
        }
    }

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
  if (!targetm.have_prologue ()
      && !targetm.have_epilogue ()
      && !targetm.have_sibcall_epilogue ())
    return;
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                {
                  if (NOTE_P (note)
                      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                    break;
                }
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }
  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          rtx_insn *insn, *first = NULL, *note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue
             insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
}
/* Returns the name of function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
        func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        {
          /* So this might be a type referenced by a global variable.
             Record that type so that we can later decide to emit its
             debug information.  */
          vec_safe_push (types_used_by_cur_var_decl, t);
        }
    }
}
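
/* A worked example (illustrative only): given a declaration such as

     struct s **v[10];

   the peeling loop above strips the array and both pointer layers, so
   it is 'struct s' itself (or rather its main variant) that ends up
   recorded for debug-info purposes.  */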
/* Helper to Hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}
/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
                         types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash
          = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc<types_used_by_vars_entry> ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}
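
/* Hypothetical usage sketch (the call site shown is illustrative, not
   a quote of an actual caller): a front end recording the type of a
   file-scope variable might do

     types_used_by_var_decl_insert (TREE_TYPE (var_decl), var_decl);

   so that debug information for the type can later be emitted even if
   no function body mentions it.  */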
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  /* prepare_shrink_wrap is sensitive to the block structure of the control
     flow graph, so clean it up first.  */
  if (optimize)
    cleanup_cfg (0);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     so clean those up.  */
  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     asm ("": "+mr" (inout));

   which is transformed very early to use explicit output and match operands:

     asm ("": "=mr" (inout) : "0" (inout));

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads,
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   into

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
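
/* A source-level example (illustrative only; the insn mnemonic is an
   arbitrary x86-style stand-in) of code that can run into this on a
   register-starved target:

     int
     bump (int v)
     {
       asm ("incl %0" : "+mr" (v));
       return v;
     }

   After SSA and copy propagation the "+mr" constraint becomes a
   matched output/input pair built from two different values, which is
   exactly the situation the pass below repairs.  */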
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change in once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);
      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (TREE_CODE (d) == VAR_DECL);
  vec_safe_push (fun->local_decls, d);
}
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints
unsigned int
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
        {
          rtx pat, *p_sets;
          int noutputs;

          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}
} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}

#include "gt-function.h"