/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "rtl-error.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimple-expr.h"
#include "tree-pass.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "bb-reorder.h"
#include "shrink-wrap.h"
#include "tree-chkp.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
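
/* Editor's illustration (not part of the original file): with ALIGN a
   power of two and two's-complement arithmetic,

     FLOOR_ROUND (-13, 8) == -16     CEIL_ROUND (-13, 8) == -8
     FLOOR_ROUND  (13, 8) ==   8     CEIL_ROUND  (13, 8) == 16

   so both macros behave correctly for negative VALUEs without ever
   dividing a negative dividend.  */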
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
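
/* Editor's illustration (not part of the original file): if
   STARTING_FRAME_OFFSET is 4 and PREFERRED_STACK_BOUNDARY is 64 bits,
   then frame_alignment == 8, frame_off == 4 and frame_phase == 4.
   Slots requesting 8-byte alignment are therefore placed at offsets
   congruent to 4 modulo 8 relative to the virtual frame base, which
   become true multiples of 8 once STARTING_FRAME_OFFSET is added in.  */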
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with last parameter as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
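
/* Editor's usage sketch (not part of the original file): code that
   needs a word-sized spill slot would typically write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   and receive a MEM whose address is based on virtual_stack_vars_rtx
   (or on frame_pointer_rtx once virtuals_instantiated is set).  */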
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */
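
/* Editor's sketch (not part of the original file) of the nesting
   discipline described above, as used by the statement expanders:

       push_temp_slots ();
       rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));
       ... expand the statement using TMP ...
       preserve_temp_slots (result);  -- only if RESULT may be in a temp
       pop_temp_slots ();             -- frees this level's slots

   pop_temp_slots frees the slots of the departing level before
   decrementing the nesting level.  */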
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;

  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
			    temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  if (INTVAL (XEXP (x, 1)) >= p->base_offset
	      && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	    return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
			    tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc<temp_slot> ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
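
/* Editor's usage sketch (not part of the original file):

     rtx mem = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));

   yields a fresh (or reused) DImode stack MEM that stays valid until
   the current temp slot level is popped.  */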
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are in use.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see whether OLD_RTX is a PLUS.  If so and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location; if so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
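
/* Editor's usage sketch (not part of the original file): a back end
   that must refer to the entry value of, say, its link register can do

     rtx lr = get_hard_reg_initial_val (Pmode, LINK_REGNUM);

   where LINK_REGNUM stands for a hypothetical target register number.
   The move from the hard reg into the returned pseudo is emitted at
   function entry by emit_initial_value_sets below.  */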
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
	  && REGNO (ivs->entries[i].hard_reg) == regno)
	return ivs->entries[i].pseudo;

  return NULL_RTX;
}
unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (crtl->outgoing_args_size						      \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE		      \
					: TREE_TYPE (FNDECL))) ? 0	      \
	: INCOMING_REG_PARM_STACK_SPACE (FNDECL)))			      \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)		      \
 + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
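
/* Editor's illustration (not part of the original file): given
   (plus virtual_stack_vars_rtx (const_int 8)), instantiation rewrites
   the base to frame_pointer_rtx and folds var_offset into the constant,
   producing (plus frame_pointer_rtx (const_int (var_offset + 8))).  */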
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
	{
	  rtx new_rtx;
	  HOST_WIDE_INT offset;
	  switch (GET_CODE (x))
	    {
	    case REG:
	      new_rtx = instantiate_new_reg (x, &offset);
	      if (new_rtx)
		{
		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
		  changed = true;
		}
	      iter.skip_subrtxes ();
	      break;

	    case PLUS:
	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
	      if (new_rtx)
		{
		  XEXP (x, 0) = new_rtx;
		  *loc = plus_constant (GET_MODE (x), x, offset, true);
		  changed = true;
		  iter.skip_subrtxes ();
		}

	      /* FIXME -- from old code */
	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
		 we can commute the PLUS and SUBREG because pointers into the
		 frame are well-behaved.  */
	      break;

	    default:
	      break;
	    }
	}
    }
  return changed;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos; see expand_builtin_setjmp_receiver.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   gen_int_mode (-offset, GET_MODE (new_rtx)));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
				   gen_int_mode (offset,
						 GET_MODE (SET_DEST (set))),
				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);

	    if (!instantiate_virtual_regs_in_rtx (&addr))
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr, true);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr, true);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is a valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       gen_int_mode (offset, GET_MODE (x)),
				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop
		(GET_MODE (new_rtx), PLUS, new_rtx,
		 gen_int_mode (offset, GET_MODE (new_rtx)),
		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  /* For asm goto, instead of fixing up all the edges
	     just clear the template and clear input operands
	     (asm goto doesn't have any output operands).  */
	  if (JUMP_P (insn))
	    {
	      rtx asm_op = extract_asm_operands (PATTERN (insn));
	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
	    }
	  else
	    delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process the saved static chain if it exists.  */
  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;
	else if (DEBUG_INSN_P (insn))
	  instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
	else
	  instantiate_virtual_regs_in_insn (insn);

	if (insn->deleted ())
	  continue;

	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (CALL_P (insn))
	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace
rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
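
/* Editor's note (not part of the original file): like other RTL passes,
   this one is instantiated through its make_pass_* hook from the pass
   list, conventionally via an entry of the form

     NEXT_PASS (pass_instantiate_virtual_regs);

   in passes.def.  */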
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	{
	  tree fndecl = get_callee_fndecl (fntype);
	  if (fndecl)
	    fntype = TREE_TYPE (fndecl);
	  else if (CALL_EXPR_FN (fntype))
	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
	  else
	    /* For internal functions, assume nothing needs to be
	       returned in memory.  */
	    return 0;
	}
	break;
      case FUNCTION_DECL:
	fntype = TREE_TYPE (fntype);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = NULL_TREE;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
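
/* Editor's usage sketch (not part of the original file): callers ask
   whether a function's result must be returned in memory with, e.g.,

     if (aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl)), fndecl))
       ... the return value needs a hidden return-slot pointer ...
*/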
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Decl is implicitly addressable by bound stores and loads
     if it is an aggregate holding bounds.  */
  if (chkp_function_instrumented_p (current_function_decl)
      && TREE_TYPE (decl)
      && !BOUNDED_P (decl)
      && chkp_type_has_pointer (TREE_TYPE (decl)))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
	 types containing methods, otherwise the methods won't be callable
	 from the debugger.  */
      if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
	return false;
      break;
    default:
      break;
    }

  return true;
}
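/* Illustrative sketch (editorial addition): kinds of locals that the tests
   above accept or reject; each comment names the deciding predicate.  */
#if 0
void example (void)
{
  volatile int v = 0;  /* TREE_SIDE_EFFECTS: stays in memory        */
  int a = 0;
  int *p = &a;         /* A is TREE_ADDRESSABLE: stays in memory    */
  int b = *p + v;      /* B is eligible for a pseudo register       */
  (void) b;
}
#endif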
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}
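/* Illustrative sketch (editorial addition): TREE_ADDRESSABLE types (e.g.
   C++ classes with nontrivial copy semantics) and variable-sized types are
   forced by the middle end to pass by invisible reference; for ordinary
   large structs the decision belongs to the target hook, so the comment
   below describes a plausible outcome only.  */
#if 0
struct big { char buf[1024]; };
void callee (struct big b);  /* some ABIs pass B by invisible reference:
				the caller copies B and passes its address,
				and pass_by_reference returns true */
#endif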
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};

struct bounds_parm_data
{
  assign_parm_data_one parm_data;
  tree bounds_parm;
  tree ptr_parm;
  rtx ptr_entry;
  int bound_no;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions were
   needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  DECL_MODE (p) = VOIDmode;
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;

      /* If the function is instrumented, the bounds of the passed
	 structure address are the second argument.  */
      if (chkp_function_instrumented_p (fndecl))
	{
	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			     PARM_DECL, get_identifier (".result_bnd"),
			     pointer_bounds_type_node);
	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_NAMELESS (decl) = 1;
	  TREE_CONSTANT (decl) = 1;

	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
	  DECL_CHAIN (all->orig_fnargs) = decl;
	  fnargs.safe_insert (1, decl);
	}
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
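/* Illustrative sketch (editorial addition): when aggregate_value_p holds
   for the result and the target has no struct_value_rtx register, the
   augmented list gains the hidden return-slot pointer built above; the
   ".result_ptr" name is the artificial decl created by this function.  */
#if 0
struct large { char buf[64]; };
struct large g (int x);
/* is processed as if declared:  g (struct large *".result_ptr", int x)  */
#endif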
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
					 TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
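/* Illustrative sketch (editorial addition): for a narrow parameter the
   modes recorded above can differ.  On a typical 32-bit target (an
   assumption, not a guarantee) the 'short' parameter below might yield
   nominal_mode == HImode and passed_mode == HImode while, where the ABI
   promotes narrow arguments, promote_function_mode selects
   promoted_mode == SImode.  */
#if 0
void take_short (short s);
#endif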
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
					data->promoted_mode,
					data->passed_type,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
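/* Worked example (editorial addition): suppose the first stack argument
   has 12 of its bytes passed in registers (partial == 12) and STACK_BYTES
   is 8.  Then pretend_bytes = 12 and
     all->pretend_args_size = CEIL_ROUND (12, 8) = 16,
   so the prologue extends the stack by a full 16 bytes, preserving the
   STACK_BOUNDARY alignment invariant described above.  */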
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Bounds are never passed on the stack to keep compatibility
     with not instrumented code.  */
  if (POINTER_BOUNDS_TYPE_P (data->passed_type))
    return false;
  /* Trivially true if we've no incoming register.  */
  else if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      int offset = subreg_lowpart_offset (DECL_MODE (parm),
						  data->promoted_mode);
	      if (offset)
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm),
			       validize_mem (copy_rtx (stack_parm)),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == 2
	       || data->passed_pointer
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  machine_mode mode
	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == downward)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
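/* Worked example (editorial addition): a 3-byte struct arriving in a
   register on a 32-bit BYTES_BIG_ENDIAN target falls into the shift case
   above: size = 3, so by = (UNITS_PER_WORD - size) * BITS_PER_UNIT
   = (4 - 3) * 8 = 8, and the value is shifted left 8 bits so its bytes
   land at the start of the stack slot.  size_stored = CEIL_ROUND (3, 4)
   = 4, so a full word is allocated.  */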
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (copy_rtx (data->entry_parm));

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx_insn *insn, *insns;
	  rtx t = op1;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
					   data->passed_mode, unsignedp);
	  emit_insn (pat);
	  insns = get_insns ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (PATTERN (insn), record_hard_reg_sets,
			     &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  end_sequence ();

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
	  && GET_MODE_SIZE (GET_MODE (tempreg))
	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
	 Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, DECL_RTL (parm));
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (parmreg, tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx_insn *linsn = get_last_insn ();
      rtx_insn *sinsn;
      rtx set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  machine_mode submode
	    = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  int offset = subreg_lowpart_offset (data->nominal_mode,
					      GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  SET_DECL_RTL (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}
/* Load bounds of PARM from bounds table.  */
static void
assign_parm_load_bounds (struct assign_parm_data_one *data,
			 tree parm,
			 rtx entry,
			 unsigned bound_no)
{
  bitmap_iterator bi;
  unsigned i, offs = 0;
  int bnd_no = -1;
  rtx slot = NULL, ptr = NULL;

  if (parm)
    {
      bitmap slots;
      bitmap_obstack_initialize (NULL);
      slots = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (TREE_TYPE (parm), slots);
      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
	{
	  if (bound_no)
	    bound_no--;
	  else
	    {
	      bnd_no = i;
	      break;
	    }
	}
      BITMAP_FREE (slots);
      bitmap_obstack_release (NULL);
    }

  /* We may have bounds not associated with any pointer.  */
  if (bnd_no != -1)
    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;

  /* Find associated pointer.  */
  if (bnd_no == -1)
    {
      /* If bounds are not associated with any pointer,
	 then they are passed in a register or special slot.  */
      gcc_assert (data->entry_parm);
      ptr = const0_rtx;
    }
  else if (MEM_P (entry))
    slot = adjust_address (entry, Pmode, offs);
  else if (REG_P (entry))
    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
  else if (GET_CODE (entry) == PARALLEL)
    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
  else
    gcc_unreachable ();
  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
							data->entry_parm);
}
/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */

static void
assign_bounds (vec<bounds_parm_data> &bndargs,
	       struct assign_parm_data_all &all,
	       bool assign_regs, bool assign_special,
	       bool assign_bt)
{
  unsigned i, pass;
  bounds_parm_data *pbdata;

  if (!bndargs.exists ())
    return;

  /* We make three passes to store input bounds.  First handle bounds
     passed in registers.  After that we load bounds passed in special
     slots.  Finally we load bounds from the Bounds Table.  */
  for (pass = 0; pass < 3; pass++)
    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
      {
	/* Pass 0 => regs only.  */
	if (pass == 0
	    && (!assign_regs
		|| (!pbdata->parm_data.entry_parm
		    || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
	  continue;
	/* Pass 1 => slots only.  */
	else if (pass == 1
		 && (!assign_special
		     || (!pbdata->parm_data.entry_parm
			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
	  continue;
	/* Pass 2 => BT only.  */
	else if (pass == 2
		 && (!assign_bt
		     || pbdata->parm_data.entry_parm))
	  continue;

	if (!pbdata->parm_data.entry_parm
	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
				   pbdata->ptr_entry, pbdata->bound_no);

	set_decl_incoming_rtl (pbdata->bounds_parm,
			       pbdata->parm_data.entry_parm, false);

	if (assign_parm_setup_block_p (&pbdata->parm_data))
	  assign_parm_setup_block (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
	else if (pbdata->parm_data.passed_pointer
		 || use_register_for_decl (pbdata->bounds_parm))
	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
				 &pbdata->parm_data);
	else
	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
				   &pbdata->parm_data);
      }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i, bound_no = 0;
  tree last_arg = NULL;
  rtx last_arg_entry = NULL;
  vec<bounds_parm_data> bndargs = vNULL;
  bounds_parm_data bdata;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}
      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Remember where the last non-bounds arg was passed in case
	     we have to load associated bounds for it from the Bounds
	     Table.  */
	  last_arg = parm;
	  last_arg_entry = data.entry_parm;
	  bound_no = 0;
	}
      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      /* Bounds should be loaded in a particular order to
	 have registers allocated correctly.  Collect info about
	 input bounds and load them later.  */
      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Expect bounds in instrumented functions only.  */
	  gcc_assert (chkp_function_instrumented_p (fndecl));

	  bdata.parm_data = data;
	  bdata.bounds_parm = parm;
	  bdata.ptr_parm = last_arg;
	  bdata.ptr_entry = last_arg_entry;
	  bdata.bound_no = bound_no;
	  bndargs.safe_push (bdata);
	}
      else
	{
	  assign_parm_adjust_stack_rtl (&data);

	  if (assign_parm_setup_block_p (&data))
	    assign_parm_setup_block (&all, parm, &data);
	  else if (data.passed_pointer || use_register_for_decl (parm))
	    assign_parm_setup_reg (&all, parm, &data);
	  else
	    assign_parm_setup_stack (&all, parm, &data);
	}

      if (cfun->stdarg && !DECL_CHAIN (parm))
	{
	  int pretend_bytes = 0;

	  assign_parms_setup_varargs (&all, &data, false);

	  if (chkp_function_instrumented_p (fndecl))
	    {
	      /* We expect this is the last parm.  Otherwise it is wrong
		 to assign bounds right now.  */
	      gcc_assert (i == (fnargs.length () - 1));
	      assign_bounds (bndargs, all, true, false, false);
	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
							  data.promoted_mode,
							  data.passed_type,
							  &pretend_bytes,
							  false);
	      assign_bounds (bndargs, all, false, true, true);
	      bndargs.release ();
	    }
	}

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	bound_no++;
    }

  assign_bounds (bndargs, all, true, true, true);
  bndargs.release ();

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = CEIL_ROUND (crtl->args.size,
				PARM_BOUNDARY / BITS_PER_UNIT);

  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
	= (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
	   : expand_expr (size_diffop (all.stack_args_size.var,
				       size_int (-all.stack_args_size.constant)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  if (chkp_function_instrumented_p (fndecl))
	    crtl->return_bnd
	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
							  fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
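/* Worked example (editorial addition): if the named arguments occupy 20
   bytes of stack and PARM_BOUNDARY is 64 bits, the rounding above gives
     crtl->args.size = CEIL_ROUND (20, 64 / 8) = 24,
   assuming reg_parm_stack_space does not impose a larger minimum.  */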
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
	  tree type = TREE_TYPE (data.passed_type);
	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARMs.  Keep the parms address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
				       size_int (DECL_ALIGN (parm)));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
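/* Illustrative sketch (editorial addition, not actual GCC output): for a
   callee-copied reference parameter of variable size, the statements
   emitted above amount to the following pseudo-GIMPLE, after which the
   parm's DECL_VALUE_EXPR is the dereference of the temporary pointer:
     addr = __builtin_alloca_with_align (DECL_SIZE_UNIT (parm), align);
     *addr = *parm;  */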
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

  if (ARGS_GROW_DOWNWARD)
    {
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					      initial_offset_ptr->var);

      {
	tree s2 = sizetree;
	if (where_pad != none
	    && (!tree_fits_uhwi_p (sizetree)
		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
	SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (&locate->slot_offset, boundary,
			      &locate->alignment_pad);

      locate->size.constant = (-initial_offset_ptr->constant
			       - locate->slot_offset.constant);
      if (initial_offset_ptr->var)
	locate->size.var = size_binop (MINUS_EXPR,
				       size_binop (MINUS_EXPR,
						   ssize_int (0),
						   initial_offset_ptr->var),
				       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
	 below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == downward)
	pad_below (&locate->offset, passed_mode, sizetree);
    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (initial_offset_ptr, boundary,
			      &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
	 so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == downward)
	pad_below (&locate->offset, passed_mode, sizetree);

      if (where_pad != none
	  && (!tree_fits_uhwi_p (sizetree)
	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

      ADD_PARM_SIZE (locate->size, sizetree);

      locate->size.constant -= part_size_in_regs;
    }

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
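/* Illustrative example (not from the original sources): with a BOUNDARY of
   64 bits (8 bytes) and a zero STACK_POINTER_OFFSET, a constant offset of 5
   becomes CEIL_ROUND (5, 8) == 8 when arguments grow upward, and
   FLOOR_ROUND (5, 8) == 0 when ARGS_GROW_DOWNWARD; the difference from the
   saved offset is what ends up in *ALIGNMENT_PAD.  */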
static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
	  tree rounded;

	  if (ARGS_GROW_DOWNWARD)
	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
	  else
	    rounded = round_up (offset, boundary / BITS_PER_UNIT);

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  offset_ptr->constant = -sp_offset +
	    (ARGS_GROW_DOWNWARD
	     ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
	     : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));

	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to a multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
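/* Example (illustrative): with a 32-bit PARM_BOUNDARY, a BLKmode parm of
   10 bytes is padded below by 2 bytes -- S2 is round_up (10, 4) == 12, so
   the net effect on *OFFSET_PTR is +12 - 10 == +2.  */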
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after flow analysis and before register
   allocation, since register allocation clobbers the pseudo-regs to
   hard regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t)) != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
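/* A sketch of the fragment machinery (illustrative): if hot/cold
   partitioning splits a scope B so that its insns end up in two address
   ranges, the second NOTE_INSN_BLOCK_BEG for B gets a copy B' with
   BLOCK_FRAGMENT_ORIGIN (B') == B, linked onto BLOCK_FRAGMENT_CHAIN (B);
   debug output can then describe B as one range per fragment.  */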
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx_insn *insns, tree current_block,
		  vec<tree> *p_block_stack)
{
  rtx_insn *insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      if (prev_end)
		BLOCK_SAME_RANGE (prev_end) = 0;
	      prev_end = NULL_TREE;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_SAME_RANGE (new_block) = 0;
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      if (prev_beg == current_block && prev_beg)
		BLOCK_SAME_RANGE (block) = 1;

	      prev_beg = origin;

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  tree super;
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
							  (origin))
				   == current_block);
		  if (p_block_stack->is_empty ())
		    super = current_block;
		  else
		    {
		      super = p_block_stack->last ();
		      gcc_assert (super == current_block
				  || BLOCK_FRAGMENT_ORIGIN (super)
				     == current_block);
		    }
		  BLOCK_SUPERCONTEXT (block) = super;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      p_block_stack->safe_push (block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = p_block_stack->pop ();
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
	      prev_beg = NULL_TREE;
	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
			 ? NOTE_BLOCK (insn) : NULL_TREE;
	    }
	}
      else if (!DECL_IGNORED_P (current_function_decl))
	{
	  prev_beg = NULL_TREE;
	  if (prev_end)
	    BLOCK_SAME_RANGE (prev_end) = 0;
	  prev_end = NULL_TREE;
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
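/* For instance, given the chain A -> B -> C (linked through BLOCK_CHAIN),
   blocks_nreverse rewires it in place to C -> B -> A and returns C.  */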
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
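/* E.g. block_chainon (A->B, C->D) rewires the chains to A->B->C->D and
   returns A; if OP1 is NULL it simply returns OP2, and vice versa.  */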
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
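/* Typical use (a sketch, not from the original file; FNDECL stands in for
   some FUNCTION_DECL):

     int n_blocks;
     tree *blocks = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     for (int i = 0; i < n_blocks; i++)
       ... examine blocks[i] ...
     free (blocks);  */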
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      if (opts != optimization_default_node)
	{
	  init_tree_optimization_optabs (opts);
	  if (TREE_OPTIMIZATION_OPTABS (opts))
	    this_fn_optabs = (struct target_optabs *)
	      TREE_OPTIMIZATION_OPTABS (opts);
	}
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function_p> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}
/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
		       || !cfun
		       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
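/* A typical caller pairs the two (a sketch, not from the original file;
   FNDECL stands in for some FUNCTION_DECL with a struct function attached):

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... inspect or emit into the pushed function ...
     pop_cfun ();

   so that both cfun and current_function_decl are saved and restored.  */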
/* Return the current value of funcdef_no and increase it.  */

int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return the current value of funcdef_no.  */

int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_cleared_alloc<function> ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;

      if (!profile_flag && !flag_instrument_function_entry_exit)
	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
	      || (!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!get_last_insn ());
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_cleared_alloc<stack_usage> ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
static void
push_dummy_function (bool with_decl)
{
  tree fn_decl, fn_type, fn_result_decl;

  gcc_assert (!in_dummy_function);
  in_dummy_function = true;

  if (with_decl)
    {
      fn_type = build_function_type_list (void_type_node, NULL_TREE);
      fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			    fn_type);
      fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
				   NULL_TREE, void_type_node);
      DECL_RESULT (fn_decl) = fn_result_decl;
    }
  else
    fn_decl = NULL_TREE;

  push_struct_function (fn_decl);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  push_dummy_function (false);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);

  /* Initialize backend, if needed.  */
  initialize_rtl ();

  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test		0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx_code_label *label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch ((int) (HAVE_stack_protect_test != 0))
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
	{
	  emit_insn (tmp);
	  break;
	}
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (tmp && JUMP_P (tmp))
    predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
	    {
	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
	      set_mem_attributes (x, DECL_RESULT (subr), 1);
	    }
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
	  && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	SET_DECL_RTL (DECL_RESULT (subr),
		      gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    SET_DECL_RTL (DECL_RESULT (subr),
			  gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;

      if (chkp_function_instrumented_p (current_function_decl))
	{
	  tree return_type = TREE_TYPE (DECL_RESULT (subr));
	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
								 subr, 1);
	  SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain;
      rtx_insn *insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_dst_reg_note (insn, REG_EQUIV, chain, local);

      /* If we aren't optimizing, save the static chain onto the stack.  */
      if (!optimize)
	{
	  tree saved_static_chain_decl
	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
			  DECL_NAME (parm), TREE_TYPE (parm));
	  rtx saved_static_chain_rtx
	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
	  emit_move_insn (saved_static_chain_rtx, chain);
	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
	}
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
static void
pop_dummy_function (void)
{
  pop_cfun ();
  in_dummy_function = false;
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_dummy_function ();
}
/* Helper for diddle_return_value.  */

static void
diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
{
  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  diddle_return_value_1 (doit, arg, crtl->return_bnd);
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */

void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = DECL_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
	&& !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
/* Set the location of the insn chain starting at INSN to LOC.  */

static void
set_insn_locations (rtx_insn *insn, int loc)
{
  while (insn != NULL)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
5286 expand_function_end (void)
5288 /* If arg_pointer_save_area was referenced only from a nested
5289 function, we will not have initialized it yet. Do that now. */
5290 if (arg_pointer_save_area
&& ! crtl
->arg_pointer_save_area_init
)
5291 get_arg_pointer_save_area ();
5293 /* If we are doing generic stack checking and this function makes calls,
5294 do a stack probe at the start of the function to ensure we have enough
5295 space for another stack frame. */
5296 if (flag_stack_check
== GENERIC_STACK_CHECK
)
5298 rtx_insn
*insn
, *seq
;
5300 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5303 rtx max_frame_size
= GEN_INT (STACK_CHECK_MAX_FRAME_SIZE
);
5305 if (STACK_CHECK_MOVING_SP
)
5306 anti_adjust_stack_and_probe (max_frame_size
, true);
5308 probe_stack_range (STACK_OLD_CHECK_PROTECT
, max_frame_size
);
5311 set_insn_locations (seq
, prologue_location
);
5312 emit_insn_before (seq
, stack_check_probe_note
);
5317 /* End any sequences that failed to be closed due to syntax errors. */
5318 while (in_sequence_p ())
5321 clear_pending_stack_adjust ();
5322 do_pending_stack_adjust ();
5324 /* Output a linenumber for the end of the function.
5325 SDB depends on this. */
5326 set_curr_insn_location (input_location
);
5328 /* Before the return label (if any), clobber the return
5329 registers so that they are not propagated live to the rest of
5330 the function. This can only happen with functions that drop
5331 through; if there had been a return statement, there would
5332 have either been a return rtx, or a jump to the return label.
5334 We delay actual code generation after the current_function_value_rtx
5336 rtx_insn
*clobber_after
= get_last_insn ();
5338 /* Output the label for the actual return from the function. */
5339 emit_label (return_label
);
5341 if (targetm_common
.except_unwind_info (&global_options
) == UI_SJLJ
)
5343 /* Let except.c know where it should emit the call to unregister
5344 the function context for sjlj exceptions. */
5345 if (flag_exceptions
)
5346 sjlj_emit_function_exit_after (get_last_insn ());
5350 /* We want to ensure that instructions that may trap are not
5351 moved into the epilogue by scheduling, because we don't
5352 always emit unwind information for the epilogue. */
5353 if (cfun
->can_throw_non_call_exceptions
)
5354 emit_insn (gen_blockage ());
5357 /* If this is an implementation of throw, do what's necessary to
5358 communicate between __builtin_eh_return and the epilogue. */
5359 expand_eh_return ();
5361 /* If scalar return value was computed in a pseudo-reg, or was a named
5362 return value that got dumped to the stack, copy that to the hard
5364 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl
)))
5366 tree decl_result
= DECL_RESULT (current_function_decl
);
5367 rtx decl_rtl
= DECL_RTL (decl_result
);
5369 if (REG_P (decl_rtl
)
5370 ? REGNO (decl_rtl
) >= FIRST_PSEUDO_REGISTER
5371 : DECL_REGISTER (decl_result
))
5373 rtx real_decl_rtl
= crtl
->return_rtx
;
5375 /* This should be set in assign_parms. */
5376 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl
));
5378 /* If this is a BLKmode structure being returned in registers,
5379 then use the mode computed in expand_return. Note that if
5380 decl_rtl is memory, then its mode may have been changed,
5381 but that crtl->return_rtx has not. */
5382 if (GET_MODE (real_decl_rtl
) == BLKmode
)
5383 PUT_MODE (real_decl_rtl
, GET_MODE (decl_rtl
));
5385 /* If a non-BLKmode return value should be padded at the least
5386 significant end of the register, shift it left by the appropriate
5387 amount. BLKmode results are handled using the group load/store
5389 if (TYPE_MODE (TREE_TYPE (decl_result
)) != BLKmode
5390 && REG_P (real_decl_rtl
)
5391 && targetm
.calls
.return_in_msb (TREE_TYPE (decl_result
)))
5393 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl
),
5394 REGNO (real_decl_rtl
)),
5396 shift_return_value (GET_MODE (decl_rtl
), true, real_decl_rtl
);
5398 /* If a named return value dumped decl_return to memory, then
5399 we may need to re-do the PROMOTE_MODE signed/unsigned
5401 else if (GET_MODE (real_decl_rtl
) != GET_MODE (decl_rtl
))
5403 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (decl_result
));
5404 promote_function_mode (TREE_TYPE (decl_result
),
5405 GET_MODE (decl_rtl
), &unsignedp
,
5406 TREE_TYPE (current_function_decl
), 1);
5408 convert_move (real_decl_rtl
, decl_rtl
, unsignedp
);
5410 else if (GET_CODE (real_decl_rtl
) == PARALLEL
)
5412 /* If expand_function_start has created a PARALLEL for decl_rtl,
5413 move the result to the real return registers. Otherwise, do
5414 a group load from decl_rtl for a named return. */
5415 if (GET_CODE (decl_rtl
) == PARALLEL
)
5416 emit_group_move (real_decl_rtl
, decl_rtl
);
5418 emit_group_load (real_decl_rtl
, decl_rtl
,
5419 TREE_TYPE (decl_result
),
5420 int_size_in_bytes (TREE_TYPE (decl_result
)));
5422 /* In the case of complex integer modes smaller than a word, we'll
5423 need to generate some non-trivial bitfield insertions. Do that
5424 on a pseudo and not the hard register. */
5425 else if (GET_CODE (decl_rtl
) == CONCAT
5426 && GET_MODE_CLASS (GET_MODE (decl_rtl
)) == MODE_COMPLEX_INT
5427 && GET_MODE_BITSIZE (GET_MODE (decl_rtl
)) <= BITS_PER_WORD
)
5429 int old_generating_concat_p
;
5432 old_generating_concat_p
= generating_concat_p
;
5433 generating_concat_p
= 0;
5434 tmp
= gen_reg_rtx (GET_MODE (decl_rtl
));
5435 generating_concat_p
= old_generating_concat_p
;
5437 emit_move_insn (tmp
, decl_rtl
);
5438 emit_move_insn (real_decl_rtl
, tmp
);
5441 emit_move_insn (real_decl_rtl
, decl_rtl
);
5445 /* If returning a structure, arrange to return the address of the value
5446 in a place where debuggers expect to find it.
5448 If returning a structure PCC style,
5449 the caller also depends on this value.
5450 And cfun->returns_pcc_struct is not necessarily set. */
5451 if ((cfun
->returns_struct
|| cfun
->returns_pcc_struct
)
5452 && !targetm
.calls
.omit_struct_return_reg
)
5454 rtx value_address
= DECL_RTL (DECL_RESULT (current_function_decl
));
5455 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
5458 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl
)))
5459 type
= TREE_TYPE (type
);
5461 value_address
= XEXP (value_address
, 0);
5463 outgoing
= targetm
.calls
.function_value (build_pointer_type (type
),
5464 current_function_decl
, true);
5466 /* Mark this as a function return value so integrate will delete the
5467 assignment and USE below when inlining this function. */
5468 REG_FUNCTION_VALUE_P (outgoing
) = 1;
5470 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5471 value_address
= convert_memory_address (GET_MODE (outgoing
),
5474 emit_move_insn (outgoing
, value_address
);
5476 /* Show return register used to hold result (in this case the address
5478 crtl
->return_rtx
= outgoing
;
5481 /* Emit the actual code to clobber return register. Don't emit
5482 it if clobber_after is a barrier, then the previous basic block
5483 certainly doesn't fall thru into the exit block. */
5484 if (!BARRIER_P (clobber_after
))
5487 clobber_return_register ();
5488 rtx_insn
*seq
= get_insns ();
5491 emit_insn_after (seq
, clobber_after
);
5494 /* Output the label for the naked return from the function. */
5495 if (naked_return_label
)
5496 emit_label (naked_return_label
);
5498 /* @@@ This is a kludge. We want to ensure that instructions that
5499 may trap are not moved into the epilogue by scheduling, because
5500 we don't always emit unwind information for the epilogue. */
5501 if (cfun
->can_throw_non_call_exceptions
5502 && targetm_common
.except_unwind_info (&global_options
) != UI_SJLJ
)
5503 emit_insn (gen_blockage ());
5505 /* If stack protection is enabled for this function, check the guard. */
5506 if (crtl
->stack_protect_guard
)
5507 stack_protect_epilogue ();
5509 /* If we had calls to alloca, and this machine needs
5510 an accurate stack pointer to exit the function,
5511 insert some code to save and restore the stack pointer. */
5512 if (! EXIT_IGNORE_STACK
5513 && cfun
->calls_alloca
)
5518 emit_stack_save (SAVE_FUNCTION
, &tem
);
5519 rtx_insn
*seq
= get_insns ();
5521 emit_insn_before (seq
, parm_birth_insn
);
5523 emit_stack_restore (SAVE_FUNCTION
, tem
);
5526 /* ??? This should no longer be necessary since stupid is no longer with
5527 us, but there are some parts of the compiler (eg reload_combine, and
5528 sh mach_dep_reorg) that still try and compute their own lifetime info
5529 instead of using the general framework. */
5530 use_return_register ();
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (copy_rtx (ret)),
		      crtl->args.internal_arg_pointer);
      rtx_insn *seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
{
  rtx_insn *tmp;
  hash_table<insn_cache_hasher> *hash = *hashp;

  if (hash == NULL)
    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      rtx *slot = hash->find_slot (tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  hash_table<insn_cache_hasher> *hash;
  rtx *slot;

  hash = epilogue_insn_hash;
  if (!hash || !hash->find (insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !hash->find (insn))
	return;
    }

  slot = hash->find_slot (copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i;
      for (i = seq->len () - 1; i >= 0; i--)
	if (hash->find (seq->element (i)))
	  return true;
      return false;
    }

  return hash->find (const_cast<rtx> (insn)) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
/* Insert use of return register before the end of BB.  */

static void
emit_use_return_register_into_block (basic_block bb)
{
  start_sequence ();
  use_return_register ();
  rtx_insn *seq = get_insns ();
  end_sequence ();
  rtx_insn *insn = BB_END (bb);
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    insn = prev_cc0_setter (insn);
  emit_insn_before (seq, insn);
}
/* Create a return pattern, either simple_return or return, depending on
   simple_p.  */

static rtx
gen_return_pattern (bool simple_p)
{
  if (!HAVE_simple_return)
    gcc_assert (!simple_p);

  return simple_p ? gen_simple_return () : gen_return ();
}
/* Insert an appropriate return pattern at the end of block BB.  This
   also means updating block_for_insn appropriately.  SIMPLE_P is
   the same as in gen_return_pattern and passed to it.  */

void
emit_return_into_block (bool simple_p, basic_block bb)
{
  rtx_jump_insn *jump = emit_jump_insn_after (gen_return_pattern (simple_p),
					      BB_END (bb));
  rtx pat = PATTERN (jump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  gcc_assert (ANY_RETURN_P (pat));
  JUMP_LABEL (jump) = pat;
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx_insn *returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
/* Return true if there are any active insns between HEAD and TAIL.  */
bool
active_insn_between (rtx_insn *head, rtx_insn *tail)
{
  while (tail)
    {
      if (active_insn_p (tail))
	return true;
      if (tail == head)
	return false;
      tail = PREV_INSN (tail);
    }
  return false;
}
/* LAST_BB is a block that exits, and empty of active instructions.
   Examine its predecessors for jumps that can be converted to
   (conditional) returns.  */
static vec<edge>
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
			  vec<edge> unconverted ATTRIBUTE_UNUSED)
{
  int i;
  basic_block bb;
  edge_iterator ei;
  edge e;
  auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));

  FOR_EACH_EDGE (e, ei, last_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
      src_bbs.quick_push (e->src);

  rtx_insn *label = BB_HEAD (last_bb);

  FOR_EACH_VEC_ELT (src_bbs, i, bb)
    {
      rtx_insn *jump = BB_END (bb);

      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
	continue;

      e = find_edge (bb, last_bb);

      /* If we have an unconditional jump, we can replace that
	 with a simple return instruction.  */
      if (simplejump_p (jump))
	{
	  /* The use of the return register might be present in the exit
	     fallthru block.  Either:
	     - removing the use is safe, and we should remove the use in
	       the exit fallthru block, or
	     - removing the use is not safe, and we should add it here.
	     For now, we conservatively choose the latter.  Either of the
	     2 helps in crossjumping.  */
	  emit_use_return_register_into_block (bb);
	  emit_return_into_block (simple_p, bb);
	  delete_insn (jump);
	}

      /* If we have a conditional jump branching to the last
	 block, we can try to replace that with a conditional
	 return instruction.  */
      else if (condjump_p (jump))
	{
	  rtx dest;

	  if (simple_p)
	    dest = simple_return_rtx;
	  else
	    dest = ret_rtx;
	  if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
	    {
	      if (HAVE_simple_return && simple_p)
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "Failed to redirect bb %d branch.\n", bb->index);
		  unconverted.safe_push (e);
		}
	      continue;
	    }

	  /* See comment in simplejump_p case above.  */
	  emit_use_return_register_into_block (bb);

	  /* If this block has only one successor, it both jumps
	     and falls through to the fallthru block, so we can't
	     delete the edge.  */
	  if (single_succ_p (bb))
	    continue;
	}
      else
	{
	  if (HAVE_simple_return && simple_p)
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Failed to redirect bb %d branch.\n", bb->index);
	      unconverted.safe_push (e);
	    }
	  continue;
	}

      /* Fix up the CFG for the successful change we just made.  */
      redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
      e->flags &= ~EDGE_CROSSING;
    }

  return unconverted;
}
/* Emit a return insn for the exit fallthru block.  */
static basic_block
emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
{
  basic_block last_bb = exit_fallthru_edge->src;

  if (JUMP_P (BB_END (last_bb)))
    {
      last_bb = split_edge (exit_fallthru_edge);
      exit_fallthru_edge = single_succ_edge (last_bb);
    }
  emit_barrier_after (BB_END (last_bb));
  emit_return_into_block (simple_p, last_bb);
  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  return last_bb;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
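/* Illustrative example (a sketch, not from the original sources): in a
   function shaped like

     f () { if (rare_case) { ...slow path using call-saved regs... } return; }

   only the slow-path block requires the prologue, so the prologue/epilogue
   pair can be emitted around that path alone and the common path executes
   without them (subject to the single-edge restriction described above).  */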
5878 thread_prologue_and_epilogue_insns (void)
5881 vec
<edge
> unconverted_simple_returns
= vNULL
;
5882 bitmap_head bb_flags
;
5883 rtx_insn
*returnjump
;
5884 rtx_insn
*epilogue_end ATTRIBUTE_UNUSED
;
5885 rtx_insn
*prologue_seq ATTRIBUTE_UNUSED
, *split_prologue_seq ATTRIBUTE_UNUSED
;
5886 edge e
, entry_edge
, orig_entry_edge
, exit_fallthru_edge
;
5891 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5894 epilogue_end
= NULL
;
5897 /* Can't deal with multiple successors of the entry block at the
5898 moment. Function should always have at least one entry
5900 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
5901 entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5902 orig_entry_edge
= entry_edge
;
5904 split_prologue_seq
= NULL
;
5905 if (flag_split_stack
5906 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun
->decl
))
5909 #ifndef HAVE_split_stack_prologue
5912 gcc_assert (HAVE_split_stack_prologue
);
5915 emit_insn (gen_split_stack_prologue ());
5916 split_prologue_seq
= get_insns ();
5919 record_insns (split_prologue_seq
, NULL
, &prologue_insn_hash
);
5920 set_insn_locations (split_prologue_seq
, prologue_location
);
5924 prologue_seq
= NULL
;
5925 #ifdef HAVE_prologue
5929 rtx_insn
*seq
= safe_as_a
<rtx_insn
*> (gen_prologue ());
5932 /* Insert an explicit USE for the frame pointer
5933 if the profiling is on and the frame pointer is required. */
5934 if (crtl
->profile
&& frame_pointer_needed
)
5935 emit_use (hard_frame_pointer_rtx
);
5937 /* Retain a map of the prologue insns. */
5938 record_insns (seq
, NULL
, &prologue_insn_hash
);
5939 emit_note (NOTE_INSN_PROLOGUE_END
);
5941 /* Ensure that instructions are not moved into the prologue when
5942 profiling is on. The call to the profiling routine can be
5943 emitted within the live range of a call-clobbered register. */
5944 if (!targetm
.profile_before_prologue () && crtl
->profile
)
5945 emit_insn (gen_blockage ());
5947 prologue_seq
= get_insns ();
5949 set_insn_locations (prologue_seq
, prologue_location
);
5953 bitmap_initialize (&bb_flags
, &bitmap_default_obstack
);
5955 /* Try to perform a kind of shrink-wrapping, making sure the
5956 prologue/epilogue is emitted only around those parts of the
5957 function that require it. */
5959 try_shrink_wrapping (&entry_edge
, orig_entry_edge
, &bb_flags
, prologue_seq
);
5961 if (split_prologue_seq
!= NULL_RTX
)
5963 insert_insn_on_edge (split_prologue_seq
, orig_entry_edge
);
5966 if (prologue_seq
!= NULL_RTX
)
5968 insert_insn_on_edge (prologue_seq
, entry_edge
);
5972 /* If the exit block has no non-fake predecessors, we don't need
5974 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5975 if ((e
->flags
& EDGE_FAKE
) == 0)
5980 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5982 exit_fallthru_edge
= find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
);
5984 if (HAVE_simple_return
&& entry_edge
!= orig_entry_edge
)
5986 = get_unconverted_simple_return (exit_fallthru_edge
, bb_flags
,
5987 &unconverted_simple_returns
,
5991 if (exit_fallthru_edge
== NULL
)
5996 basic_block last_bb
= exit_fallthru_edge
->src
;
5998 if (LABEL_P (BB_HEAD (last_bb
))
5999 && !active_insn_between (BB_HEAD (last_bb
), BB_END (last_bb
)))
6000 convert_jumps_to_returns (last_bb
, false, vNULL
);
6002 if (EDGE_COUNT (last_bb
->preds
) != 0
6003 && single_succ_p (last_bb
))
6005 last_bb
= emit_return_for_exit (exit_fallthru_edge
, false);
6006 epilogue_end
= returnjump
= BB_END (last_bb
);
6008 /* Emitting the return may add a basic block.
6009 Fix bb_flags for the added block. */
6010 if (HAVE_simple_return
&& last_bb
!= exit_fallthru_edge
->src
)
6011 bitmap_set_bit (&bb_flags
, last_bb
->index
);
6018 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6019 this marker for the splits of EH_RETURN patterns, and nothing else
6020 uses the flag in the meantime. */
6021 epilogue_completed
= 1;
6023 #ifdef HAVE_eh_return
6024 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6025 some targets, these get split to a special version of the epilogue
6026 code. In order to be able to properly annotate these with unwind
6027 info, try to split them now. If we get a valid split, drop an
6028 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6029 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
6031 rtx_insn
*prev
, *last
, *trial
;
6033 if (e
->flags
& EDGE_FALLTHRU
)
6035 last
= BB_END (e
->src
);
6036 if (!eh_returnjump_p (last
))
6039 prev
= PREV_INSN (last
);
6040 trial
= try_split (PATTERN (last
), last
, 1);
6044 record_insns (NEXT_INSN (prev
), NEXT_INSN (trial
), &epilogue_insn_hash
);
6045 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, prev
);
  /* If nothing falls through into the exit block, we don't need an
     epilogue.  */

  if (exit_fallthru_edge == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
      if (seq)
        emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locations (seq, epilogue_location);

      seq = get_insns ();
      returnjump = get_last_insn ();
      end_sequence ();

      insert_insn_on_edge (seq, exit_fallthru_edge);
      inserted = true;

      if (JUMP_P (returnjump))
        set_return_jump_label (returnjump);
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB_FN (cur_bb, cfun)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }

epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
         be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          if (((e->flags & EDGE_FALLTHRU) != 0)
              && returnjump_p (BB_END (e->src)))
            e->flags &= ~EDGE_FALLTHRU;
        }
    }

  if (HAVE_simple_return)
    convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
                              returnjump, unconverted_simple_returns);
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei)); ei_next (&ei))
    {
      basic_block bb = e->src;
      rtx_insn *insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn)
          || (HAVE_simple_return
              && (entry_edge != orig_entry_edge
                  && !bitmap_bit_p (&bb_flags, bb->index))))
        continue;

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          rtx_insn *seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before we
             actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locations (seq, epilogue_location);

          emit_insn_before (seq, insn);
        }
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif

  bitmap_clear (&bb_flags);
  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if ! defined (HAVE_prologue) && ! defined (HAVE_sibcall_epilogue)
  if (!HAVE_epilogue)
    return;
#endif

  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                {
                  if (NOTE_P (note)
                      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                    break;
                }
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }
  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          rtx_insn *insn, *first = NULL, *note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue
             insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
}
/* Returns the name of function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
        func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        /* So this might be a type referenced by a global variable.
           Record that type so that we can later decide to emit its
           debug information.  */
        vec_safe_push (types_used_by_cur_var_decl, t);
    }
}
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
                         types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
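
/* Together, hash () and equal () make the (var_decl, type) pair the key
   of the hash table, which lets types_used_by_var_decl_insert below use
   find_slot to avoid recording the same pair twice.  */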
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash
          = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc<types_used_by_vars_entry> ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}
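
/* A hypothetical caller: a front end that wants debug info emitted for
   the type of a global variable GBL, even if nothing else references
   that type, could record the association with

     types_used_by_var_decl_insert (TREE_TYPE (gbl), gbl);  */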
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     see PR57320.  */
  cleanup_cfg (0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
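
/* (The RTL prologue/epilogue sequences come from the target: a back end
   that provides define_expand patterns named "prologue", "epilogue", or
   "sibcall_epilogue" in its machine description supplies the
   gen_epilogue/gen_sibcall_epilogue sequences used above.)  */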
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
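
/* (These make_pass_* functions are the factories through which the pass
   manager instantiates the passes; passes.def places "pro_and_epilogue"
   at the appropriate point in the RTL pipeline.)  */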
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads,
   or, alternatively, a symptom of both operands not coming into reload
   as matching (in which case the pseudo could go to memory just fine,
   as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
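
/* Concretely, match_asm_constraints_1 below implements this by emitting
   a move 'inout_2 = inout_1' immediately before the asm and then
   rewriting every occurrence of inout_1 in the asm pattern to inout_2,
   so the matching operands become identical again.  */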
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change 'in' once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this
         situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we replaced only the occurrence of the input operand (to make
         the matching), we would be left with this:

           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the
         same value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (TREE_CODE (d) == VAR_DECL);
  vec_safe_push (fun->local_decls, d);
}
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints
unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}

} // anon namespace
rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"