/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "rtl-error.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-pass.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "tree-chkp.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
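/* Worked example (illustrative, not part of the original sources):
   FLOOR_ROUND (-13, 8) == -16 and CEIL_ROUND (13, 8) == 16, i.e. the
   masking trick rounds toward negative and positive infinity
   respectively without ever using signed division.  */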
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache)) hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache)) hash_table<insn_cache_hasher> *epilogue_insn_hash;

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
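/* Worked example (assumed numbers, not from the original sources): with
   PREFERRED_STACK_BOUNDARY == 128 and STARTING_FRAME_OFFSET == 4 we get
   frame_alignment == 16, frame_off == 4 and frame_phase == 12.  Fitting
   an 8-byte-aligned slot at START == 0 when the frame grows upward gives
   this_frame_offset == CEIL_ROUND (0 - 12, 8) + 12 == -8 + 12 == 4, so
   the absolute offset 4 + STARTING_FRAME_OFFSET == 8 is indeed a
   multiple of the requested alignment.  */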
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
                           &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with last parameter ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
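/* Minimal usage sketch (illustrative, not part of the original file):
   an expander that needs an 8-byte spill slot aligned according to
   DImode could write

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   The returned MEM is addressed off virtual_stack_vars_rtx until
   instantiate_virtual_regs runs, after which the address is frame
   pointer relative.  */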
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;

  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));

  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}
/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          if (INTVAL (XEXP (x, 1)) >= p->base_offset
              && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
            return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              vec_safe_push (stack_slot_list, p->slot);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

static void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and there is a register
     in common between them, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
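/* Lifecycle sketch (illustrative only, not from the original sources):
   code expanding a single statement typically brackets it with

     push_temp_slots ();
     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));
     ... emit RTL that uses tmp ...
     preserve_temp_slots (result);   (only if RESULT may live in a temp)
     pop_temp_slots ();

   so the slot returns to avail_temp_slots as soon as the level pops,
   while a preserved slot migrates to the enclosing nesting level.  */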
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};

/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}
/* Emit the moves that copy the recorded hard registers into their
   pseudos at function entry.  */

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS \
    ? (crtl->outgoing_args_size \
       + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE \
                                          : TREE_TYPE (FNDECL))) ? 0 \
          : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
    : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
   + (STACK_POINTER_OFFSET))
#endif
#endif
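/* Worked example under the default definition above (illustrative):
   with ACCUMULATE_OUTGOING_ARGS, crtl->outgoing_args_size == 32 and
   STACK_POINTER_OFFSET == 0, STACK_DYNAMIC_OFFSET (fndecl) evaluates to
   32, i.e. dynamically allocated memory starts just past the outgoing
   argument block.  */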
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
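/* Illustration (assumed offset, not from the original sources): if
   var_offset is 16, this function maps virtual_stack_vars_rtx to
   frame_pointer_rtx with *poffset == 16, so the walkers below rewrite
   (plus (reg virtual-stack-vars) (const_int 8)) into
   (plus (reg frame-pointer) (const_int 24)).  */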
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
        {
          rtx new_rtx;
          HOST_WIDE_INT offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
                {
                  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
                  changed = true;
                }
              iter.skip_subrtxes ();
              break;

            case PLUS:
              new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
              if (new_rtx)
                {
                  XEXP (x, 0) = new_rtx;
                  *loc = plus_constant (GET_MODE (x), x, offset, true);
                  changed = true;
                  iter.skip_subrtxes ();
                }
              else
                /* FIXME -- from old code */
                /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
                   we can commute the PLUS and SUBREG because pointers into the
                   frame are well-behaved.  */
                ;
              break;

            default:
              break;
            }
        }
    }
  return changed;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          instantiate_virtual_regs_in_rtx (&SET_SRC (set));
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   gen_int_mode (-offset, GET_MODE (new_rtx)));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
                                   gen_int_mode (offset,
                                                 GET_MODE (SET_DEST (set))),
                                   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);

            if (!instantiate_virtual_regs_in_rtx (&addr))
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr, true);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr, true);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is a valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       gen_int_mode (offset, GET_MODE (x)),
                                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop
                (GET_MODE (new_rtx), PLUS, new_rtx,
                 gen_int_mode (offset, GET_MODE (new_rtx)),
                 NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          /* For asm goto, instead of fixing up all the edges
             just clear the template and clear input operands
             (asm goto doesn't have any output operands).  */
          if (JUMP_P (insn))
            {
              rtx asm_op = extract_asm_operands (PATTERN (insn));
              ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
              ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
              ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
            }
          else
            delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process the saved static chain if it exists.  */
  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;
        else if (DEBUG_INSN_P (insn))
          instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
        else
          instantiate_virtual_regs_in_insn (insn);

        if (insn->deleted ())
          continue;

        instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          if (fndecl)
            fntype = TREE_TYPE (fndecl);
          else if (CALL_EXPR_FN (fntype))
            fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
          else
            /* For internal functions, assume nothing needs to be
               returned in memory.  */
            return 0;
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
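/* Illustrative example (exact behavior is target-dependent): for a
   function returning a large struct, targetm.calls.return_in_memory
   normally answers true, so aggregate_value_p returns 1 and callers
   pass a hidden address for the return value; for a plain int it
   returns 0 and the value comes back in a call-clobbered register.  */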
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* We often try to use the SSA_NAME, instead of its underlying
	 decl, to get type information and guide decisions, to avoid
	 differences of behavior between anonymous and named
	 variables, but in this one case we have to go for the actual
	 variable if there is one.  The main reason is that, at least
	 at -O0, we want to place user variables on the stack, but we
	 don't mind using pseudos for anonymous or ignored temps.
	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
	 should go in pseudos, whereas their corresponding variables
	 might have to go on the stack.  So, disregarding the decl
	 here would negatively impact debug info at -O0, enable
	 coalescing between SSA_NAMEs that ought to get different
	 stack/pseudo assignments, and get the incoming argument
	 processing thoroughly confused by PARM_DECLs expected to live
	 in stack slots but assigned to pseudos.  */
      if (!SSA_NAME_VAR (decl))
	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));

      decl = SSA_NAME_VAR (decl);
    }

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* RESULT_DECLs are a bit special in that they're assigned without
     regard to use_register_for_decl, but we generally only store in
     them.  If we coalesce their SSA NAMEs, we'd better return a
     result that matches the assignment in expand_function_start.  */
  if (TREE_CODE (decl) == RESULT_DECL)
    {
      /* If it's not an aggregate, we're going to use a REG or a
	 PARALLEL containing a REG.  */
      if (!aggregate_value_p (decl, current_function_decl))
	return true;

      /* If expand_function_start determines the return value, we'll
	 use MEM if it's not by reference.  */
      if (cfun->returns_pcc_struct
	  || (targetm.calls.struct_value_rtx
	      (TREE_TYPE (current_function_decl), 1)))
	return DECL_BY_REFERENCE (decl);

      /* Otherwise, we're taking an extra all.function_result_decl
	 argument.  It's set up in assign_parms_augmented_arg_list,
	 under the (negated) conditions above, and then it's used to
	 set up the RESULT_DECL rtl in assign_parms, after looping
	 over all parameters.  Now, if the RESULT_DECL is not by
	 reference, we'll use a MEM either way.  */
      if (!DECL_BY_REFERENCE (decl))
	return false;

      /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
	 the function_result_decl's assignment.  Since it's a pointer,
	 we can short-circuit a number of the tests below, and we must
	 duplicate them because we don't have the
	 function_result_decl to test.  */
      if (!targetm.calls.allocate_stack_slots_for_args ())
	return true;
      /* We don't set DECL_IGNORED_P for the function_result_decl.  */
      if (optimize)
	return true;
      /* We don't set DECL_REGISTER for the function_result_decl.  */
      return false;
    }

  /* Decl is implicitly addressable by bound stores and loads
     if it is an aggregate holding bounds.  */
  if (chkp_function_instrumented_p (current_function_decl)
      && TREE_TYPE (decl)
      && !BOUNDED_P (decl)
      && chkp_type_has_pointer (TREE_TYPE (decl)))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
	 types containing methods, otherwise the methods won't be callable
	 from the debugger.  */
      if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
	return false;
      break;
    default:
      break;
    }

  return true;
}
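/* Illustrative example (editor's sketch, not original GCC text):

       void h (void)
       {
	 volatile int v;        // TREE_SIDE_EFFECTS -> stack
	 int x, *p = &x;        // TREE_ADDRESSABLE (x) -> stack
	 int tmp = 0;           // ignored temp at -O2 -> pseudo
       }

   Named user variables stay in stack slots at -O0 so the debugger can
   find them; anonymous or ignored temps may go straight to pseudos.  */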
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};

struct bounds_parm_data
{
  assign_parm_data_one parm_data;
  tree bounds_parm;
  tree ptr_parm;
  rtx ptr_entry;
  int bound_no;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}
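/* Illustrative example (editor's sketch): on a target whose
   split_complex_arg hook returns true for _Complex double,

       double cabs2 (_Complex double z);

   is handled as if it had been declared with two scalar parameters

       double cabs2 (double z_real, double z_imag);

   the original PARM_DECL is rewritten to the component type and a
   second synthetic PARM_DECL is inserted right after it.  */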
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;
      /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
	 changes, the end of the RESULT_DECL handling block in
	 use_register_for_decl must be adjusted to match.  */

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;

      /* If the function is instrumented, then the bounds of the
	 passed structure address are passed as the second argument.  */
      if (chkp_function_instrumented_p (fndecl))
	{
	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			     PARM_DECL, get_identifier (".result_bnd"),
			     pointer_bounds_type_node);
	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_NAMELESS (decl) = 1;
	  TREE_CONSTANT (decl) = 1;

	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
	  DECL_CHAIN (all->orig_fnargs) = decl;
	  fnargs.safe_insert (1, decl);
	}
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
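/* Illustrative example (editor's sketch): for

       struct big { int a[32]; };
       struct big make_big (int n);

   on a target with no struct_value_rtx register, the augmented list
   seen by assign_parms is conceptually

       struct big *.result_ptr, int n

   where .result_ptr is the artificial, nameless pointer parameter
   built above.  */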
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
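/* Illustrative example (editor's sketch): for a parameter 'short s' on
   a target that promotes small integers for argument passing, this
   function yields roughly

       nominal_mode  = HImode   // TYPE_MODE (short)
       passed_mode   = HImode   // TYPE_MODE (DECL_ARG_TYPE)
       promoted_mode = SImode   // from promote_function_mode

   whereas a type passed by invisible reference is rewritten so that
   all three modes describe the pointer, with passed_pointer set.  */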
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
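/* Worked example (editor's sketch): if a partial argument leaves
   PRETEND_BYTES == 12 on a target with STACK_BYTES == 16, then

       all->pretend_args_size = CEIL_ROUND (12, 16) == 16

   i.e. the prologue allocates a full stack-boundary chunk, preserving
   the argument-pointer alignment invariant described above.  */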
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Bounds are never passed on the stack to keep compatibility
     with non-instrumented code.  */
  if (POINTER_BOUNDS_TYPE_P (data->passed_type))
    return false;
  /* Trivially true if we've no incoming register.  */
  else if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
						  data->promoted_mode);
	      if (offset)
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = least_bit_hwi (align);
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
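/* Worked example (editor's sketch): with downward padding, no entry
   register, a constant offset of 20 bytes and BOUNDARY == 64 bits:

       align = 20 * BITS_PER_UNIT | 64;   // 160 | 64 == 224
       align = least_bit_hwi (224);       // == 32

   so the slot is assumed to be only 32-bit aligned: the largest power
   of two guaranteed by both the offset and the boundary.  */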
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm),
			       validize_mem (copy_rtx (stack_parm)),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
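/* Illustrative example (editor's sketch): a DImode argument that the
   target describes as

       (parallel:DI [(expr_list (reg:SI 0) (const_int 0))
		     (expr_list (reg:SI 1) (const_int 4))])

   is reconstituted by emit_group_store into a single DImode pseudo,
   which the rest of parameter setup can treat as an ordinary REG.  */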
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  rtx target_reg = NULL_RTX;
  bool in_conversion_seq = false;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we want the parameter in a pseudo, don't use a stack slot.  */
  if (is_gimple_reg (parm) && use_register_for_decl (parm))
    {
      tree def = ssa_default_def (cfun, parm);
      gcc_assert (def);
      machine_mode mode = promote_ssa_mode (def, NULL);
      rtx reg = gen_reg_rtx (mode);
      if (GET_CODE (reg) != CONCAT)
	stack_parm = reg;
      else
	{
	  target_reg = reg;
	  /* Avoid allocating a stack slot, if there isn't one
	     preallocated by the ABI.  It might seem like we should
	     always prefer a pseudo, but converting between
	     floating-point and integer modes goes through the stack
	     on various machines, so it's better to use the reserved
	     stack slot than to risk wasting it and allocating more
	     for the conversion.  */
	  if (stack_parm == NULL_RTX)
	    {
	      int save = generating_concat_p;
	      generating_concat_p = 0;
	      stack_parm = gen_reg_rtx (mode);
	      generating_concat_p = save;
	    }
	}
      data->stack_parm = NULL;
    }

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
	emit_group_store (mem, entry_parm, data->passed_type, size);
      else if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	  in_conversion_seq = true;
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

#ifdef BLOCK_REG_PADDING
	  /* Storing the register in memory as a full word, as
	     move_block_from_reg below would do, and then using the
	     MEM in a smaller mode, has the effect of shifting right
	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
	     shifting must be explicit.  */
	  else if (!MEM_P (mem))
	    {
	      rtx x;

	      /* If the assert below fails, we should have taken the
		 mode != BLKmode path above, unless we have downward
		 padding of smaller-than-word arguments on a machine
		 with little-endian bytes, which would likely require
		 additional changes to work correctly.  */
	      gcc_checking_assert (BYTES_BIG_ENDIAN
				   && (BLOCK_REG_PADDING (mode,
							  data->passed_type, 1)
				       == upward));

	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
				NULL_RTX, 1);
	      x = force_reg (word_mode, x);
	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);

	      emit_move_insn (mem, x);
	    }
#endif

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else if (!MEM_P (mem))
	{
	  gcc_checking_assert (size > UNITS_PER_WORD);
#ifdef BLOCK_REG_PADDING
	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
						  data->passed_type, 0)
			       == upward);
#endif
	  emit_move_insn (mem, entry_parm);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
      in_conversion_seq = true;
    }

  if (target_reg)
    {
      if (!in_conversion_seq)
	emit_move_insn (target_reg, stack_parm);
      else
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (target_reg, stack_parm);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}
      stack_parm = target_reg;
    }

  data->stack_parm = stack_parm;
  set_parm_rtl (parm, stack_parm);
}
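/* Worked example (editor's sketch): a 3-byte struct arriving in a
   register on a 32-bit BYTES_BIG_ENDIAN target with downward padding
   has its payload in the least significant end of the register, so
   the LSHIFT_EXPR branch above shifts it left by

       by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;   // (4 - 3) * 8 == 8

   moving the 3 payload bytes to the most significant end, which is
   what lands first in memory on a big-endian machine.  */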
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;
  rtx rtl;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);
  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set rtl appropriately.  */
  if (data->passed_pointer)
    {
      rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (rtl, parm, 1);
    }
  else
    rtl = parmreg;

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (copy_rtx (data->entry_parm));

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx_insn *insn, *insns;
	  rtx t = op1;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
					   data->passed_mode, unsignedp);
	  emit_insn (pat);
	  insns = get_insns ();
	  end_sequence ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (PATTERN (insn), record_hard_reg_sets,
			     &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      SET_DECL_RTL (parm, rtl);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      SET_DECL_RTL (parm, NULL_RTX);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
	 the debug info in case it is not legitimate.  */
      if (GET_MODE (parmreg) != GET_MODE (rtl))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, rtl);
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
			  tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);

      rtl = parmreg;

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  set_parm_rtl (parm, rtl);

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx_insn *linsn = get_last_insn ();
      rtx_insn *sinsn;
      rtx set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  int offset = subreg_lowpart_offset (data->nominal_mode,
					      GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	{
	  if (!REG_P (src))
	    src = force_reg (GET_MODE (src), src);
	  emit_move_insn (dest, src);
	}
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  set_parm_rtl (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_parm_rtl (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}
/* Load bounds of PARM from bounds table.  */

static void
assign_parm_load_bounds (struct assign_parm_data_one *data,
			 tree parm,
			 rtx entry,
			 unsigned bound_no)
{
  bitmap_iterator bi;
  unsigned i, offs = 0;
  int bnd_no = -1;
  rtx slot = NULL, ptr = NULL;

  if (parm)
    {
      bitmap slots;
      bitmap_obstack_initialize (NULL);
      slots = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (TREE_TYPE (parm), slots);
      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
	{
	  if (bound_no)
	    bound_no--;
	  else
	    {
	      bnd_no = i;
	      break;
	    }
	}
      BITMAP_FREE (slots);
      bitmap_obstack_release (NULL);
    }

  /* We may have bounds not associated with any pointer.  */
  if (bnd_no != -1)
    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;

  /* Find associated pointer.  */
  if (bnd_no == -1)
    {
      /* If bounds are not associated with any pointer,
	 then they are passed in a register or special slot.  */
      gcc_assert (data->entry_parm);
      ptr = const0_rtx;
    }
  else if (MEM_P (entry))
    slot = adjust_address (entry, Pmode, offs);
  else if (REG_P (entry))
    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
  else if (GET_CODE (entry) == PARALLEL)
    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
  else
    gcc_unreachable ();
  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
							data->entry_parm);
}
/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */

static void
assign_bounds (vec<bounds_parm_data> &bndargs,
	       struct assign_parm_data_all &all,
	       bool assign_regs, bool assign_special,
	       bool assign_bt)
{
  unsigned i, pass;
  bounds_parm_data *pbdata;

  if (!bndargs.exists ())
    return;

  /* We make a few passes to store input bounds.  First handle bounds
     passed in registers.  After that we load bounds passed in special
     slots.  Finally we load bounds from the Bounds Table.  */
  for (pass = 0; pass < 3; pass++)
    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
      {
	/* Pass 0 => regs only.  */
	if (pass == 0
	    && (!assign_regs
		|| (!pbdata->parm_data.entry_parm
		    || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
	  continue;
	/* Pass 1 => slots only.  */
	else if (pass == 1
		 && (!assign_special
		     || (!pbdata->parm_data.entry_parm
			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
	  continue;
	/* Pass 2 => BT only.  */
	else if (pass == 2
		 && (!assign_bt
		     || pbdata->parm_data.entry_parm))
	  continue;

	if (!pbdata->parm_data.entry_parm
	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
				   pbdata->ptr_entry, pbdata->bound_no);

	set_decl_incoming_rtl (pbdata->bounds_parm,
			       pbdata->parm_data.entry_parm, false);

	if (assign_parm_setup_block_p (&pbdata->parm_data))
	  assign_parm_setup_block (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
	else if (pbdata->parm_data.passed_pointer
		 || use_register_for_decl (pbdata->bounds_parm))
	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
				 &pbdata->parm_data);
	else
	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
      }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i, bound_no = 0;
  tree last_arg = NULL;
  rtx last_arg_entry = NULL;
  vec<bounds_parm_data> bndargs = vNULL;
  bounds_parm_data bdata;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}
      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Remember where last non bounds arg was passed in case
	     we have to load associated bounds for it from Bounds
	     Table.  */
	  last_arg = parm;
	  last_arg_entry = data.entry_parm;
	  bound_no = 0;
	}
      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      assign_parm_adjust_stack_rtl (&data);

      /* Bounds should be loaded in the particular order to
	 have registers allocated correctly.  Collect info about
	 input bounds and load them later.  */
      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Expect bounds in instrumented functions only.  */
	  gcc_assert (chkp_function_instrumented_p (fndecl));

	  bdata.parm_data = data;
	  bdata.bounds_parm = parm;
	  bdata.ptr_parm = last_arg;
	  bdata.ptr_entry = last_arg_entry;
	  bdata.bound_no = bound_no;
	  bndargs.safe_push (bdata);
	}
      else
	{
	  if (assign_parm_setup_block_p (&data))
	    assign_parm_setup_block (&all, parm, &data);
	  else if (data.passed_pointer || use_register_for_decl (parm))
	    assign_parm_setup_reg (&all, parm, &data);
	  else
	    assign_parm_setup_stack (&all, parm, &data);
	}

      if (cfun->stdarg && !DECL_CHAIN (parm))
	{
	  int pretend_bytes = 0;

	  assign_parms_setup_varargs (&all, &data, false);

	  if (chkp_function_instrumented_p (fndecl))
	    {
	      /* We expect this is the last parm.  Otherwise it is wrong
		 to assign bounds right now.  */
	      gcc_assert (i == (fnargs.length () - 1));
	      assign_bounds (bndargs, all, true, false, false);
	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
							  data.promoted_mode,
							  data.passed_type,
							  &pretend_bytes,
							  false);
	      assign_bounds (bndargs, all, false, true, true);
	      bndargs.release ();
	    }
	}

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	bound_no++;
    }

  assign_bounds (bndargs, all, true, true, true);
  bndargs.release ();

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      set_parm_rtl (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
	= (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
	   : expand_expr (size_diffop (all.stack_args_size.var,
				       size_int (-all.stack_args_size.constant)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  if (chkp_function_instrumented_p (fndecl))
	    crtl->return_bnd
	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
							  fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
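/* Worked example (editor's sketch): with 14 bytes of stack arguments,
   no reg_parm_stack_space, and PARM_BOUNDARY == 32:

       crtl->args.size = CEIL_ROUND (14, 32 / BITS_PER_UNIT);
		       // == CEIL_ROUND (14, 4) == 16

   so the recorded incoming-argument block is padded up to a whole
   number of PARM_BOUNDARY units.  */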
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copied reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARM's.  Keep the PARM's address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
				       size_int (DECL_ALIGN (parm)));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
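
/* Illustration (not part of the original source): on a target whose ABI
   passes large aggregates by invisible reference with callee copy, a
   parameter like

     void f (struct big b) { ... }

   is rewritten here roughly as

     void f (struct big *b.ptr)
     {
       struct big b = *b.ptr;   // the gimplify_assign (local, parm, ...)
       ...                      // all uses of 'b' go through the local copy
     }

   with DECL_VALUE_EXPR (b) pointing at the local copy, and, when the type
   has variable size, the copy's storage obtained through the
   __builtin_alloca_with_align call built above.  */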
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specified boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

  if (ARGS_GROW_DOWNWARD)
    {
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					      initial_offset_ptr->var);

      {
	tree s2 = sizetree;
	if (where_pad != none
	    && (!tree_fits_uhwi_p (sizetree)
		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
	SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (&locate->slot_offset, boundary,
			      &locate->alignment_pad);

      locate->size.constant = (-initial_offset_ptr->constant
			       - locate->slot_offset.constant);
      if (initial_offset_ptr->var)
	locate->size.var = size_binop (MINUS_EXPR,
				       size_binop (MINUS_EXPR,
						   ssize_int (0),
						   initial_offset_ptr->var),
				       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
	 below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == downward)
	pad_below (&locate->offset, passed_mode, sizetree);
    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (initial_offset_ptr, boundary,
			      &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
	 so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == downward)
	pad_below (&locate->offset, passed_mode, sizetree);

      if (where_pad != none
	  && (!tree_fits_uhwi_p (sizetree)
	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

      ADD_PARM_SIZE (locate->size, sizetree);

      locate->size.constant -= part_size_in_regs;
    }

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
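
/* Worked example (illustrative, not part of the original source), assuming
   round_boundary == 64 bits and a 6-byte BLKmode argument whose
   function_arg_boundary is also 64 bits:

     - the slot offset is rounded up to a multiple of 8 bytes by
       pad_to_arg_alignment (the TARGET_FUNCTION_ARG_BOUNDARY rounding,
       which affects only the offset);
     - since 6 * BITS_PER_UNIT == 48 is not a multiple of 64 and
       where_pad != none, sizetree is rounded from 6 up to
       round_up (6, 64 / 8) == 8 bytes (the second rounding, which
       affects only the size and, indirectly, the next offset).  */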
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
	  tree rounded;

	  if (ARGS_GROW_DOWNWARD)
	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
	  else
	    rounded = round_up (offset, boundary / BITS_PER_UNIT);

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
	    (ARGS_GROW_DOWNWARD
	     ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
	     : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));

	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
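
/* Worked example (illustrative, not part of the original source), assuming
   STACK_POINTER_OFFSET == 0 and args growing upward: with boundary == 64
   bits (boundary_in_bytes == 8) and a constant offset of 20,
   CEIL_ROUND (20, 8) == (20 + 7) & ~7 == 24, so when boundary exceeds
   PARM_BOUNDARY the 4 bytes of padding are recorded in alignment_pad.
   With ARGS_GROW_DOWNWARD, FLOOR_ROUND (20, 8) == 16 is used instead.  */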
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode,
	   tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
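
/* Worked example (illustrative, not part of the original source): with
   PARM_BOUNDARY == 64, a 32-bit scalar (GET_MODE_BITSIZE == 32,
   GET_MODE_SIZE == 4) padded downward gets
   ((32 + 63) / 64 * 64 / 8) - 4 == 8 - 4 == 4 bytes added to its offset,
   so the value occupies the upper half of its 8-byte slot.  */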
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after flow analysis and before register
   allocation, since register allocation will map the pseudo-regs to
   hard regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (VAR_P (decl)
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
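
/* Illustration (not part of the original source): user code that can
   trigger the -Wclobbered warnings above looks like

     #include <setjmp.h>
     jmp_buf buf;
     int f (int arg)
     {
       int local = 1;
       if (setjmp (buf))
	 return local + arg;   // 'local' and 'arg' may be clobbered
       local = 2;              // set again between setjmp and longjmp
       g ();                   // may call longjmp (buf, 1)
       return 0;
     }

   If 'local' lives in a register and is set more than once, its value
   after longjmp is unpredictable, which is exactly the condition
   regno_clobbered_at_setjmp tests for.  */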
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx_insn *insns, tree current_block,
		  vec<tree> *p_block_stack)
{
  rtx_insn *insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      if (prev_end)
		BLOCK_SAME_RANGE (prev_end) = 0;
	      prev_end = NULL_TREE;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_SAME_RANGE (new_block) = 0;
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      if (prev_beg == current_block && prev_beg)
		BLOCK_SAME_RANGE (block) = 1;

	      prev_beg = origin;

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  tree super;
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
								      (origin))
				   == current_block);
		  if (p_block_stack->is_empty ())
		    super = current_block;
		  else
		    {
		      super = p_block_stack->last ();
		      gcc_assert (super == current_block
				  || BLOCK_FRAGMENT_ORIGIN (super)
				     == current_block);
		    }
		  BLOCK_SUPERCONTEXT (block) = super;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      p_block_stack->safe_push (block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = p_block_stack->pop ();
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
	      prev_beg = NULL_TREE;
	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
			 ? NOTE_BLOCK (insn) : NULL_TREE;
	    }
	}
      else if (!DEBUG_INSN_P (insn))
	{
	  prev_beg = NULL_TREE;
	  if (prev_end)
	    BLOCK_SAME_RANGE (prev_end) = 0;
	  prev_end = NULL_TREE;
	}
    }
}
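
/* Illustration (not part of the original source): a BLOCK acquires
   fragments when its insns end up in disjoint address ranges, e.g. after
   hot/cold partitioning or cross-jumping.  If the insn stream contains

     NOTE_INSN_BLOCK_BEG (B) ... NOTE_INSN_BLOCK_END (B)
     ... unrelated code ...
     NOTE_INSN_BLOCK_BEG (B) ... NOTE_INSN_BLOCK_END (B)

   the second BEG note finds TREE_ASM_WRITTEN (B) already set, so a copy
   of B is chained onto BLOCK_FRAGMENT_CHAIN (B) and the note is redirected
   to the copy; debug info can then emit one address range per fragment.  */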
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      if (opts != optimization_default_node)
	{
	  init_tree_optimization_optabs (opts);
	  if (TREE_OPTIMIZATION_OPTABS (opts))
	    this_fn_optabs = (struct target_optabs *)
	      TREE_OPTIMIZATION_OPTABS (opts);
	}
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
      redirect_edge_var_map_empty ();
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function *> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}

/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
		       || !cfun
		       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
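
/* Usage sketch (illustrative, not part of the original source): passes that
   need to inspect or modify another function temporarily bracket their work
   with push_cfun/pop_cfun:

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... examine or modify the function's CFG / insns ...
     pop_cfun ();

   keeping cfun and current_function_decl consistent, which the asserts
   above enforce.  */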
/* Return the current value of funcdef_no and increase it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return the current value of funcdef_no.  */
int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_cleared_alloc<function> ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);

      if (!abstract_p)
	{
	  /* Now that we have activated any function-specific attributes
	     that might affect layout, particularly vector modes, relayout
	     each of the parameters and the result.  */
	  relayout_decl (result);
	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
	       parm = DECL_CHAIN (parm))
	    relayout_decl (parm);

	  /* Similarly relayout the function decl.  */
	  targetm.target_option.relayout_function (fndecl);
	}

      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;

      if (!profile_flag && !flag_instrument_function_entry_exit)
	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
	      || (!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!get_last_insn ());
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_cleared_alloc<stack_usage> ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
void
push_dummy_function (bool with_decl)
{
  tree fn_decl, fn_type, fn_result_decl;

  gcc_assert (!in_dummy_function);
  in_dummy_function = true;

  if (with_decl)
    {
      fn_type = build_function_type_list (void_type_node, NULL_TREE);
      fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			    fn_type);
      fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
				   NULL_TREE, void_type_node);
      DECL_RESULT (fn_decl) = fn_result_decl;
    }
  else
    fn_decl = NULL_TREE;

  push_struct_function (fn_decl);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  push_dummy_function (false);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  /* Initialize backend, if needed.  */
  initialize_rtl ();

  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx_code_label *label = gen_label_rtx ();
  rtx x, y;
  rtx_insn *seq;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  if (targetm.have_stack_protect_test ()
      && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
    emit_insn (seq);
  else
    emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  rtx_insn *tmp = get_last_insn ();
  if (tmp && JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
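
/* Illustration (not part of the original source): the code emitted by
   stack_protect_epilogue amounts to

     if (guard_on_stack == guard_value)   // emit_cmp_and_jump_insns
       goto label;                        // normal return path
     __stack_chk_fail ();                 // targetm.stack_protect_fail
     label:

   with the comparison done by the target's stack_protect_test pattern
   when one is available, so neither value leaks into a register.  */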
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  tree res = DECL_RESULT (subr);
  if (aggregate_value_p (res, subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (res));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (res))
	    {
	      x = gen_rtx_MEM (DECL_MODE (res), x);
	      set_mem_attributes (x, res, 1);
	    }
	  set_parm_rtl (res, x);
	}
    }
  else if (DECL_MODE (res) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    set_parm_rtl (res, NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (res);

      /* If we may coalesce this result, make sure it has the expected mode
	 in case it was promoted.  But we need not bother about BLKmode.  */
      machine_mode promoted_mode
	= flag_tree_coalesce_vars && is_gimple_reg (res)
	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
	  : BLKmode;

      if (promoted_mode != BLKmode)
	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
      else if (TYPE_MODE (return_type) != BLKmode
	       && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      set_parm_rtl (res, gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (res) = 1;

      if (chkp_function_instrumented_p (current_function_decl))
	{
	  tree return_type = TREE_TYPE (res);
	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
								 subr, 1);
	  SET_DECL_BOUNDS_RTL (res, bounds);
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain;
      rtx_insn *insn;
      int unsignedp;

      local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      set_parm_rtl (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      if (GET_MODE (local) != GET_MODE (chain))
	{
	  convert_move (local, chain, unsignedp);
	  insn = get_last_insn ();
	}
      else
	insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_dst_reg_note (insn, REG_EQUIV, chain, local);

      /* If we aren't optimizing, save the static chain onto the stack.  */
      if (!optimize)
	{
	  tree saved_static_chain_decl
	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
			  DECL_NAME (parm), TREE_TYPE (parm));
	  rtx saved_static_chain_rtx
	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
	  emit_move_insn (saved_static_chain_rtx, chain);
	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
	}
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
void
pop_dummy_function (void)
{
  pop_cfun ();
  in_dummy_function = false;
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_dummy_function ();
}
/* Helper for diddle_return_value.  */

static void
diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
{
  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  diddle_return_value_1 (doit, arg, crtl->return_bnd);
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Set the location of the insn chain starting at INSN to LOC.  */

static void
set_insn_locations (rtx_insn *insn, int loc)
{
  while (insn != NULL)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx_insn *insn, *seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locations (seq, prologue_location);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  rtx_insn *clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
	emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && REG_P (real_decl_rtl)
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if ((cfun->returns_struct || cfun->returns_pcc_struct)
      && !targetm.calls.omit_struct_return_reg)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
					      value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      start_sequence ();
      clobber_return_register ();
      rtx_insn *seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (copy_rtx (ret)),
		      crtl->args.internal_arg_pointer);
      rtx_insn *seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
{
  rtx_insn *tmp;
  hash_table<insn_cache_hasher> *hash = *hashp;

  if (hash == NULL)
    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      rtx *slot = hash->find_slot (tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  hash_table<insn_cache_hasher> *hash;
  rtx *slot;

  hash = epilogue_insn_hash;
  if (!hash || !hash->find (insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !hash->find (insn))
	return;
    }

  slot = hash->find_slot (copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i;
      for (i = seq->len () - 1; i >= 0; i--)
	if (hash->find (seq->element (i)))
	  return true;
      return false;
    }

  return hash->find (const_cast<rtx> (insn)) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx_insn *returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
/* Return a sequence to be used as the split prologue for the current
   function, or NULL.  */

static rtx_insn *
make_split_prologue_seq (void)
{
  if (!flag_split_stack
      || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
    return NULL;

  start_sequence ();
  emit_insn (targetm.gen_split_stack_prologue ());
  rtx_insn *seq = get_insns ();
  end_sequence ();

  record_insns (seq, NULL, &prologue_insn_hash);
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the prologue for the current function,
   or NULL.  */

static rtx_insn *
make_prologue_seq (void)
{
  if (!targetm.have_prologue ())
    return NULL;

  start_sequence ();
  rtx_insn *seq = targetm.gen_prologue ();
  emit_insn (seq);

  /* Insert an explicit USE for the frame pointer
     if the profiling is on and the frame pointer is required.  */
  if (crtl->profile && frame_pointer_needed)
    emit_use (hard_frame_pointer_rtx);

  /* Retain a map of the prologue insns.  */
  record_insns (seq, NULL, &prologue_insn_hash);
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Ensure that instructions are not moved into the prologue when
     profiling is on.  The call to the profiling routine can be
     emitted within the live range of a call-clobbered register.  */
  if (!targetm.profile_before_prologue () && crtl->profile)
    emit_insn (gen_blockage ());

  seq = get_insns ();
  end_sequence ();
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the epilogue for the current function,
   or NULL.  */

static rtx_insn *
make_epilogue_seq (void)
{
  if (!targetm.have_epilogue ())
    return NULL;

  start_sequence ();
  emit_note (NOTE_INSN_EPILOGUE_BEG);
  rtx_insn *seq = targetm.gen_epilogue ();
  if (seq)
    emit_jump_insn (seq);

  /* Retain a map of the epilogue insns.  */
  record_insns (seq, NULL, &epilogue_insn_hash);
  set_insn_locations (seq, epilogue_location);

  seq = get_insns ();
  rtx_insn *returnjump = get_last_insn ();
  end_sequence ();

  if (JUMP_P (returnjump))
    set_return_jump_label (returnjump);

  return seq;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
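
/* Illustration (not part of the original source): the classic
   shrink-wrapping win is an early exit that needs no frame, e.g.

     int f (struct s *p)
     {
       if (!p)
	 return -1;        // fast path: no call-saved regs touched
       return heavy (p);   // only this path needs the prologue
     }

   Only the block calling heavy () requires the prologue, so it is emitted
   on the edge into that block rather than at function entry, and the fast
   path runs without frame setup.  */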
5913 thread_prologue_and_epilogue_insns (void)
5917 /* Can't deal with multiple successors of the entry block at the
5918 moment. Function should always have at least one entry
5920 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
5922 edge entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5923 edge orig_entry_edge
= entry_edge
;
5925 rtx_insn
*prologue_seq
= make_prologue_seq ();
5927 /* Try to perform a kind of shrink-wrapping, making sure the
5928 prologue/epilogue is emitted only around those parts of the
5929 function that require it. */
5930 try_shrink_wrapping (&entry_edge
, prologue_seq
);
5932 /* If the target can handle splitting the prologue/epilogue into separate
5933 components, try to shrink-wrap these components separately. */
5934 try_shrink_wrapping_separate (entry_edge
->dest
);
5936 /* If that did anything for any component we now need the generate the
5937 "main" prologue again. If that does not work for some target then
5938 that target should not enable separate shrink-wrapping. */
5939 if (crtl
->shrink_wrapped_separate
)
5940 prologue_seq
= make_prologue_seq ();
5942 rtx_insn
*split_prologue_seq
= make_split_prologue_seq ();
5943 rtx_insn
*epilogue_seq
= make_epilogue_seq ();
5945 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5947 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5948 this marker for the splits of EH_RETURN patterns, and nothing else
5949 uses the flag in the meantime. */
5950 epilogue_completed
= 1;
5952 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5953 some targets, these get split to a special version of the epilogue
5954 code. In order to be able to properly annotate these with unwind
5955 info, try to split them now. If we get a valid split, drop an
5956 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5959 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5961 rtx_insn
*prev
, *last
, *trial
;
5963 if (e
->flags
& EDGE_FALLTHRU
)
5965 last
= BB_END (e
->src
);
5966 if (!eh_returnjump_p (last
))
5969 prev
= PREV_INSN (last
);
5970 trial
= try_split (PATTERN (last
), last
, 1);
5974 record_insns (NEXT_INSN (prev
), NEXT_INSN (trial
), &epilogue_insn_hash
);
5975 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, prev
);
  edge exit_fallthru_edge
    = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);

  if (exit_fallthru_edge)
    {
      if (epilogue_seq)
	{
	  insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
	  commit_edge_insertions ();

	  /* The epilogue insns we inserted may cause the exit edge to no
	     longer be fallthru.  */
	  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	    {
	      if (((e->flags & EDGE_FALLTHRU) != 0)
		  && returnjump_p (BB_END (e->src)))
		e->flags &= ~EDGE_FALLTHRU;
	    }
	}
      else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
	{
	  /* We have a fall-through edge to the exit block, the source is not
	     at the end of the function, and there will be an assembler epilogue
	     at the end of the function.
	     We can't use force_nonfallthru here, because that would try to
	     use return.  Inserting a jump 'by hand' is extremely messy, so
	     we take advantage of cfg_layout_finalize using
	     fixup_fallthru_exit_predecessor.  */
	  cfg_layout_initialize (0);
	  basic_block cur_bb;
	  FOR_EACH_BB_FN (cur_bb, cfun)
	    if (cur_bb->index >= NUM_FIXED_BLOCKS
		&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	      cur_bb->aux = cur_bb->next_bb;
	  cfg_layout_finalize ();
	}
    }
  /* Insert the prologue.  */

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (split_prologue_seq || prologue_seq)
    {
      if (split_prologue_seq)
	insert_insn_on_edge (split_prologue_seq, orig_entry_edge);

      if (prologue_seq)
	insert_insn_on_edge (prologue_seq, entry_edge);

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      auto_sbitmap blocks (last_basic_block_for_fn (cfun));
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
    }
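  /* (The prologue sequence can itself contain jumps and labels, e.g. a
     stack-probing loop, so the blocks it was inserted into may have to
     be split up again; that is what the bitmap walk above feeds into
     find_many_sub_basic_blocks for.)  */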
  default_rtl_profile ();

  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei));
       ei_next (&ei))
    {
      /* Skip those already handled, the ones that run without prologue.  */
      if (e->flags & EDGE_IGNORE)
	{
	  e->flags &= ~EDGE_IGNORE;
	  continue;
	}

      rtx_insn *insn = BB_END (e->src);

      if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
	continue;

      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
	{
	  start_sequence ();
	  emit_note (NOTE_INSN_EPILOGUE_BEG);
	  emit_insn (ep_seq);
	  rtx_insn *seq = get_insns ();
	  end_sequence ();

	  /* Retain a map of the epilogue insns.  Used in life analysis to
	     avoid getting rid of sibcall epilogue insns.  Do this before we
	     actually emit the sequence.  */
	  record_insns (seq, NULL, &epilogue_insn_hash);
	  set_insn_locations (seq, epilogue_location);

	  emit_insn_before (seq, insn);
	}
    }
  if (epilogue_seq)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_seq; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
	}
    }

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
  if (!targetm.have_prologue ()
      && !targetm.have_epilogue ()
      && !targetm.have_sibcall_epilogue ())
    return;
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }
  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  rtx_insn *insn, *first = NULL, *note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
}
/* Returns the name of function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
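/* A typical (illustrative) use, as in the RTL dump routines:

     fprintf (dump_file, "\n;; Function %s\n", current_function_name ());  */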
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
	func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	{
	  /* So this might be a type referenced by a global variable.
	     Record that type so that we can later decide to emit its
	     debug information.  */
	  vec_safe_push (types_used_by_cur_var_decl, t);
	}
    }
}
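/* For instance, for a variable declared as "struct S **v[10]" the loop
   above strips the unnamed array and pointer wrappers, so it is the main
   variant of struct S itself that gets recorded; a named typedef of a
   pointer type stops the stripping early instead.  */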
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
			 types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash
	  = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc<types_used_by_vars_entry> ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace
rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  /* prepare_shrink_wrap is sensitive to the block structure of the control
     flow graph, so clean it up first.  */
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     see PR57320.  */
  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};
class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace
rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
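/* (A digit string such as "0" in an input constraint is a matching
   constraint: the input must occupy the same location as the
   like-numbered output operand.  match_asm_constraints_1 below relies
   on this syntax when it parses constraints with strtoul.)  */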
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      /* A '%' prefix marks the operand as commutative; skip it to get
	 at the matching digit, if any.  */
      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;
      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);
      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (VAR_P (d));
  vec_safe_push (fun->local_decls, d);
}
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints
unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

} // anon namespace
rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}
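/* (The make_pass_* factories above are how the pass manager
   instantiates these passes; see passes.def for where "*leaf_regs",
   "pro_and_epilogue" and "asmcons" sit in the RTL pipeline.)  */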
#include "gt-function.h"