/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "cfglayout.h"
#include "gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round the value up to the next multiple of the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
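
/* For example (illustrative values only), with ALIGN == 8:
   FLOOR_ROUND (13, 8) == 8, FLOOR_ROUND (-5, 8) == -8 and
   CEIL_ROUND (13, 8) == 16.  Both stay well defined for negative
   VALUEs because they use masking rather than division.  */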
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions; used to keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
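
/* As an example (assuming a 32-bit Pmode and 4-byte words), the check
   above rejects frames of 2**31 - 256 bytes or more, leaving the fixed
   part of the frame addressable with signed offsets.  */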
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;
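
  /* As an example (illustrative values; both parameters are
     target-defined), if STARTING_FRAME_OFFSET were 8 with a 128-bit
     PREFERRED_STACK_BOUNDARY, then frame_alignment would be 16,
     frame_off 8 and frame_phase 8: every frame offset is congruent to
     the phase modulo the stack alignment, so the rounding below works
     on (offset - frame_phase) and adds the phase back afterwards.  */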
  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with KIND set to ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
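
/* Example uses (illustrative only):

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
     rtx buf  = assign_stack_local (BLKmode, 32, -1);

   The first slot is aligned according to SImode; the second uses
   BIGGEST_ALIGNMENT and has its size rounded up to a multiple of it.
   Both return a MEM whose address is based on virtual_stack_vars_rtx
   until virtual register instantiation has been done.  */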
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
		 remove_unused_temp_slot_addresses_1,
		 NULL);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc_temp_slot ();
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      gcc_checking_assert (!MEM_SCALAR_P (slot) && !MEM_IN_STRUCT_P (slot));
      if (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
	MEM_IN_STRUCT_P (slot) = 1;
      else
	MEM_SCALAR_P (slot) = 1;
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
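
/* Example use (illustrative only): to obtain scratch storage for a
   value of type TYPE that must be addressable,

     rtx mem = assign_temp (type, 0, 1, 0);

   returns a fresh stack MEM; with MEMORY_REQUIRED == 0 and a
   register-sized type, a pseudo register is returned instead.  */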
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     was taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;

	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }

	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	{
	  make_slot_available (p);
	  some_available = true;
	}
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}
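
/* A typical nesting used by callers in the expander looks like
   (illustrative only):

     push_temp_slots ();
     ... expand a statement, allocating temporaries ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   Temporaries not preserved to an outer level are freed when their
   level is popped.  */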
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
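
/* For instance (illustrative only; base register and offset are
   target-dependent), a use of

     (plus virtual_stack_vars_rtx (const_int 8))

   is rewritten after instantiation as a PLUS of frame_pointer_rtx and
   (const_int (var_offset + 8)).  */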
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new_rtx, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}
      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }
  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
					     GEN_INT (offset), NULL_RTX,
					     1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;
	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}
1877 /* Pass through the INSNS of function FNDECL and convert virtual register
1878 references to hard register references. */
1880 static unsigned int
1881 instantiate_virtual_regs (void)
1883 rtx insn;
1885 /* Compute the offsets to use for this function. */
1886 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1887 var_offset = STARTING_FRAME_OFFSET;
1888 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1889 out_arg_offset = STACK_POINTER_OFFSET;
1890 #ifdef FRAME_POINTER_CFA_OFFSET
1891 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1892 #else
1893 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1894 #endif
1896 /* Initialize recognition, indicating that volatile is OK. */
1897 init_recog ();
1899 /* Scan through all the insns, instantiating every virtual register still
1900 present. */
1901 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1902 if (INSN_P (insn))
1903 {
1904 /* These patterns in the instruction stream can never be recognized.
1905 Fortunately, they shouldn't contain virtual registers either. */
1906 if (GET_CODE (PATTERN (insn)) == USE
1907 || GET_CODE (PATTERN (insn)) == CLOBBER
1908 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1909 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1910 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1911 continue;
1912 else if (DEBUG_INSN_P (insn))
1913 for_each_rtx (&INSN_VAR_LOCATION (insn),
1914 instantiate_virtual_regs_in_rtx, NULL);
1915 else
1916 instantiate_virtual_regs_in_insn (insn);
1918 if (INSN_DELETED_P (insn))
1919 continue;
1921 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1923 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1924 if (CALL_P (insn))
1925 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1926 instantiate_virtual_regs_in_rtx, NULL);
1927 }
1929 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1930 instantiate_decls (current_function_decl);
1932 targetm.instantiate_decls ();
1934 /* Indicate that, from now on, assign_stack_local should use
1935 frame_pointer_rtx. */
1936 virtuals_instantiated = 1;
1938 return 0;
1939 }
1941 struct rtl_opt_pass pass_instantiate_virtual_regs =
1942 {
1943 {
1944 RTL_PASS,
1945 "vregs", /* name */
1946 NULL, /* gate */
1947 instantiate_virtual_regs, /* execute */
1948 NULL, /* sub */
1949 NULL, /* next */
1950 0, /* static_pass_number */
1951 TV_NONE, /* tv_id */
1952 0, /* properties_required */
1953 0, /* properties_provided */
1954 0, /* properties_destroyed */
1955 0, /* todo_flags_start */
1956 0 /* todo_flags_finish */
1957 }
1958 };
1961 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1962 This means a type for which function calls must pass an address to the
1963 function or get an address back from the function.
1964 EXP may be a type node or an expression (whose type is tested). */
1966 int
1967 aggregate_value_p (const_tree exp, const_tree fntype)
1968 {
1969 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1970 int i, regno, nregs;
1971 rtx reg;
1973 if (fntype)
1974 switch (TREE_CODE (fntype))
1975 {
1976 case CALL_EXPR:
1977 {
1978 tree fndecl = get_callee_fndecl (fntype);
1979 fntype = (fndecl
1980 ? TREE_TYPE (fndecl)
1981 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1982 }
1983 break;
1984 case FUNCTION_DECL:
1985 fntype = TREE_TYPE (fntype);
1986 break;
1987 case FUNCTION_TYPE:
1988 case METHOD_TYPE:
1989 break;
1990 case IDENTIFIER_NODE:
1991 fntype = NULL_TREE;
1992 break;
1993 default:
1994 /* We don't expect other tree types here. */
1995 gcc_unreachable ();
1996 }
1998 if (VOID_TYPE_P (type))
1999 return 0;
2001 /* If a record should be passed the same as its first (and only) member,
2002 don't pass it as an aggregate. */
2003 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2004 return aggregate_value_p (first_field (type), fntype);
2006 /* If the front end has decided that this needs to be passed by
2007 reference, do so. */
2008 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2009 && DECL_BY_REFERENCE (exp))
2010 return 1;
2012 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2013 if (fntype && TREE_ADDRESSABLE (fntype))
2014 return 1;
2016 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2017 and thus can't be returned in registers. */
2018 if (TREE_ADDRESSABLE (type))
2019 return 1;
2021 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2022 return 1;
2024 if (targetm.calls.return_in_memory (type, fntype))
2025 return 1;
2027 /* Make sure we have suitable call-clobbered regs to return
2028 the value in; if not, we must return it in memory. */
2029 reg = hard_function_value (type, 0, fntype, 0);
2031 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2032 it is OK. */
2033 if (!REG_P (reg))
2034 return 0;
2036 regno = REGNO (reg);
2037 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2038 for (i = 0; i < nregs; i++)
2039 if (! call_used_regs[regno + i])
2040 return 1;
2042 return 0;
2043 }
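/* Example (illustrative): given
       struct big { char buf[64]; };
       struct big f (void);
   aggregate_value_p on f's return type is nonzero on most targets, so
   callers pass f an address in which to store the result; a plain
   `int g (void)' returns in a call-clobbered hard register and yields 0. */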
2045 /* Return true if we should assign DECL a pseudo register; false if it
2046 should live on the local stack. */
2048 bool
2049 use_register_for_decl (const_tree decl)
2050 {
2051 if (!targetm.calls.allocate_stack_slots_for_args())
2052 return true;
2054 /* Honor volatile. */
2055 if (TREE_SIDE_EFFECTS (decl))
2056 return false;
2058 /* Honor addressability. */
2059 if (TREE_ADDRESSABLE (decl))
2060 return false;
2062 /* Only register-like things go in registers. */
2063 if (DECL_MODE (decl) == BLKmode)
2064 return false;
2066 /* If -ffloat-store specified, don't put explicit float variables
2067 into registers. */
2068 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2069 propagates values across these stores, and it probably shouldn't. */
2070 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2071 return false;
2073 /* If we're not interested in tracking debugging information for
2074 this decl, then we can certainly put it in a register. */
2075 if (DECL_IGNORED_P (decl))
2076 return true;
2078 if (optimize)
2079 return true;
2081 if (!DECL_REGISTER (decl))
2082 return false;
2084 switch (TREE_CODE (TREE_TYPE (decl)))
2085 {
2086 case RECORD_TYPE:
2087 case UNION_TYPE:
2088 case QUAL_UNION_TYPE:
2089 /* When not optimizing, disregard register keyword for variables with
2090 types containing methods, otherwise the methods won't be callable
2091 from the debugger. */
2092 if (TYPE_METHODS (TREE_TYPE (decl)))
2093 return false;
2094 break;
2095 default:
2096 break;
2097 }
2099 return true;
2100 }
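/* Example (illustrative): in
       void f (void) { volatile int v; int i; ... }
   `v' fails the TREE_SIDE_EFFECTS test above and stays on the stack,
   while `i' may be given a pseudo register when optimizing. */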
2102 /* Return true if TYPE should be passed by invisible reference. */
2104 bool
2105 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2106 tree type, bool named_arg)
2107 {
2108 if (type)
2109 {
2110 /* If this type contains non-trivial constructors, then it is
2111 forbidden for the middle-end to create any new copies. */
2112 if (TREE_ADDRESSABLE (type))
2113 return true;
2115 /* GCC post 3.4 passes *all* variable sized types by reference. */
2116 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2117 return true;
2119 /* If a record type should be passed the same as its first (and only)
2120 member, use the type and mode of that member. */
2121 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2122 {
2123 type = TREE_TYPE (first_field (type));
2124 mode = TYPE_MODE (type);
2125 }
2126 }
2128 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2129 type, named_arg);
2130 }
2132 /* Return true if TYPE, which is passed by reference, should be callee
2133 copied instead of caller copied. */
2135 bool
2136 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2137 tree type, bool named_arg)
2138 {
2139 if (type && TREE_ADDRESSABLE (type))
2140 return false;
2141 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2142 named_arg);
2143 }
2145 /* Structures to communicate between the subroutines of assign_parms.
2146 The first holds data persistent across all parameters, the second
2147 is cleared out for each parameter. */
2149 struct assign_parm_data_all
2150 {
2151 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2152 should become a job of the target or otherwise encapsulated. */
2153 CUMULATIVE_ARGS args_so_far_v;
2154 cumulative_args_t args_so_far;
2155 struct args_size stack_args_size;
2156 tree function_result_decl;
2157 tree orig_fnargs;
2158 rtx first_conversion_insn;
2159 rtx last_conversion_insn;
2160 HOST_WIDE_INT pretend_args_size;
2161 HOST_WIDE_INT extra_pretend_bytes;
2162 int reg_parm_stack_space;
2163 };
2165 struct assign_parm_data_one
2166 {
2167 tree nominal_type;
2168 tree passed_type;
2169 rtx entry_parm;
2170 rtx stack_parm;
2171 enum machine_mode nominal_mode;
2172 enum machine_mode passed_mode;
2173 enum machine_mode promoted_mode;
2174 struct locate_and_pad_arg_data locate;
2175 int partial;
2176 BOOL_BITFIELD named_arg : 1;
2177 BOOL_BITFIELD passed_pointer : 1;
2178 BOOL_BITFIELD on_stack : 1;
2179 BOOL_BITFIELD loaded_in_reg : 1;
2180 };
2182 /* A subroutine of assign_parms. Initialize ALL. */
2184 static void
2185 assign_parms_initialize_all (struct assign_parm_data_all *all)
2186 {
2187 tree fntype ATTRIBUTE_UNUSED;
2189 memset (all, 0, sizeof (*all));
2191 fntype = TREE_TYPE (current_function_decl);
2193 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2194 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2195 #else
2196 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2197 current_function_decl, -1);
2198 #endif
2199 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2201 #ifdef REG_PARM_STACK_SPACE
2202 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2203 #endif
2204 }
2206 /* If ARGS contains entries with complex types, split each such entry into
2207 two entries of the component type. The vector *ARGS is updated in
2208 place when substitutions are needed. */
2210 static void
2211 split_complex_args (VEC(tree, heap) **args)
2212 {
2213 unsigned i;
2214 tree p;
2216 FOR_EACH_VEC_ELT (tree, *args, i, p)
2217 {
2218 tree type = TREE_TYPE (p);
2219 if (TREE_CODE (type) == COMPLEX_TYPE
2220 && targetm.calls.split_complex_arg (type))
2221 {
2222 tree decl;
2223 tree subtype = TREE_TYPE (type);
2224 bool addressable = TREE_ADDRESSABLE (p);
2226 /* Rewrite the PARM_DECL's type with its component. */
2227 p = copy_node (p);
2228 TREE_TYPE (p) = subtype;
2229 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2230 DECL_MODE (p) = VOIDmode;
2231 DECL_SIZE (p) = NULL;
2232 DECL_SIZE_UNIT (p) = NULL;
2233 /* If this arg must go in memory, put it in a pseudo here.
2234 We can't allow it to go in memory as per normal parms,
2235 because the usual place might not have the imag part
2236 adjacent to the real part. */
2237 DECL_ARTIFICIAL (p) = addressable;
2238 DECL_IGNORED_P (p) = addressable;
2239 TREE_ADDRESSABLE (p) = 0;
2240 layout_decl (p, 0);
2241 VEC_replace (tree, *args, i, p);
2243 /* Build a second synthetic decl. */
2244 decl = build_decl (EXPR_LOCATION (p),
2245 PARM_DECL, NULL_TREE, subtype);
2246 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2247 DECL_ARTIFICIAL (decl) = addressable;
2248 DECL_IGNORED_P (decl) = addressable;
2249 layout_decl (decl, 0);
2250 VEC_safe_insert (tree, heap, *args, ++i, decl);
2251 }
2252 }
2253 }
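/* Example (illustrative): on a target whose split_complex_arg hook
   accepts COMPLEX_TYPEs, a parameter declared as
       void f (_Complex double z);
   is rewritten here into two DOUBLE_TYPE PARM_DECLs, one reusing Z's
   decl for the real part and one synthetic, nameless decl for the
   imaginary part. */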
2255 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2256 the hidden struct return argument, and (abi willing) complex args.
2257 Return the new parameter list. */
2259 static VEC(tree, heap) *
2260 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2261 {
2262 tree fndecl = current_function_decl;
2263 tree fntype = TREE_TYPE (fndecl);
2264 VEC(tree, heap) *fnargs = NULL;
2265 tree arg;
2267 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2268 VEC_safe_push (tree, heap, fnargs, arg);
2270 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2272 /* If struct value address is treated as the first argument, make it so. */
2273 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2274 && ! cfun->returns_pcc_struct
2275 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2276 {
2277 tree type = build_pointer_type (TREE_TYPE (fntype));
2278 tree decl;
2280 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2281 PARM_DECL, get_identifier (".result_ptr"), type);
2282 DECL_ARG_TYPE (decl) = type;
2283 DECL_ARTIFICIAL (decl) = 1;
2284 DECL_NAMELESS (decl) = 1;
2285 TREE_CONSTANT (decl) = 1;
2287 DECL_CHAIN (decl) = all->orig_fnargs;
2288 all->orig_fnargs = decl;
2289 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2291 all->function_result_decl = decl;
2292 }
2294 /* If the target wants to split complex arguments into scalars, do so. */
2295 if (targetm.calls.split_complex_arg)
2296 split_complex_args (&fnargs);
2298 return fnargs;
2299 }
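/* Example (illustrative): for
       struct big f (int x);
   on a target whose struct_value_rtx hook returns 0, the augmented
   list built above behaves as if the source had been
       struct big f (struct big *.result_ptr, int x);
   with the artificial .result_ptr decl recorded in
   all->function_result_decl. */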
2301 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2302 data for the parameter. Incorporate ABI specifics such as pass-by-
2303 reference and type promotion. */
2305 static void
2306 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2307 struct assign_parm_data_one *data)
2308 {
2309 tree nominal_type, passed_type;
2310 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2311 int unsignedp;
2313 memset (data, 0, sizeof (*data));
2315 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2316 if (!cfun->stdarg)
2317 data->named_arg = 1; /* No variadic parms. */
2318 else if (DECL_CHAIN (parm))
2319 data->named_arg = 1; /* Not the last non-variadic parm. */
2320 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2321 data->named_arg = 1; /* Only variadic ones are unnamed. */
2322 else
2323 data->named_arg = 0; /* Treat as variadic. */
2325 nominal_type = TREE_TYPE (parm);
2326 passed_type = DECL_ARG_TYPE (parm);
2328 /* Look out for errors propagating this far. Also, if the parameter's
2329 type is void then its value doesn't matter. */
2330 if (TREE_TYPE (parm) == error_mark_node
2331 /* This can happen after weird syntax errors
2332 or if an enum type is defined among the parms. */
2333 || TREE_CODE (parm) != PARM_DECL
2334 || passed_type == NULL
2335 || VOID_TYPE_P (nominal_type))
2336 {
2337 nominal_type = passed_type = void_type_node;
2338 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2339 goto egress;
2340 }
2342 /* Find mode of arg as it is passed, and mode of arg as it should be
2343 during execution of this function. */
2344 passed_mode = TYPE_MODE (passed_type);
2345 nominal_mode = TYPE_MODE (nominal_type);
2347 /* If the parm is to be passed as a transparent union or record, use the
2348 type of the first field for the tests below. We have already verified
2349 that the modes are the same. */
2350 if ((TREE_CODE (passed_type) == UNION_TYPE
2351 || TREE_CODE (passed_type) == RECORD_TYPE)
2352 && TYPE_TRANSPARENT_AGGR (passed_type))
2353 passed_type = TREE_TYPE (first_field (passed_type));
2355 /* See if this arg was passed by invisible reference. */
2356 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2357 passed_type, data->named_arg))
2358 {
2359 passed_type = nominal_type = build_pointer_type (passed_type);
2360 data->passed_pointer = true;
2361 passed_mode = nominal_mode = Pmode;
2362 }
2364 /* Find mode as it is passed by the ABI. */
2365 unsignedp = TYPE_UNSIGNED (passed_type);
2366 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2367 TREE_TYPE (current_function_decl), 0);
2369 egress:
2370 data->nominal_type = nominal_type;
2371 data->passed_type = passed_type;
2372 data->nominal_mode = nominal_mode;
2373 data->passed_mode = passed_mode;
2374 data->promoted_mode = promoted_mode;
2375 }
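/* Example (illustrative): for `void f (short s)' on a target whose
   promote_function_mode widens sub-word integers, we would see
       nominal_mode == HImode (mode of the declared type)
       passed_mode == HImode (mode of DECL_ARG_TYPE)
       promoted_mode == SImode (mode actually used by the ABI)
   while a by-reference aggregate collapses both nominal and passed
   mode to Pmode. */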
2377 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2379 static void
2380 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2381 struct assign_parm_data_one *data, bool no_rtl)
2382 {
2383 int varargs_pretend_bytes = 0;
2385 targetm.calls.setup_incoming_varargs (all->args_so_far,
2386 data->promoted_mode,
2387 data->passed_type,
2388 &varargs_pretend_bytes, no_rtl);
2390 /* If the back-end has requested extra stack space, record how much is
2391 needed. Do not change pretend_args_size otherwise since it may be
2392 nonzero from an earlier partial argument. */
2393 if (varargs_pretend_bytes > 0)
2394 all->pretend_args_size = varargs_pretend_bytes;
2395 }
2397 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2398 the incoming location of the current parameter. */
2400 static void
2401 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2402 struct assign_parm_data_one *data)
2403 {
2404 HOST_WIDE_INT pretend_bytes = 0;
2405 rtx entry_parm;
2406 bool in_regs;
2408 if (data->promoted_mode == VOIDmode)
2409 {
2410 data->entry_parm = data->stack_parm = const0_rtx;
2411 return;
2412 }
2414 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2415 data->promoted_mode,
2416 data->passed_type,
2417 data->named_arg);
2419 if (entry_parm == 0)
2420 data->promoted_mode = data->passed_mode;
2422 /* Determine parm's home in the stack, in case it arrives in the stack
2423 or we should pretend it did. Compute the stack position and rtx where
2424 the argument arrives and its size.
2426 There is one complexity here: If this was a parameter that would
2427 have been passed in registers, but wasn't only because it is
2428 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2429 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2430 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2431 as it was the previous time. */
2432 in_regs = entry_parm != 0;
2433 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2434 in_regs = true;
2435 #endif
2436 if (!in_regs && !data->named_arg)
2437 {
2438 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2439 {
2440 rtx tem;
2441 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2442 data->promoted_mode,
2443 data->passed_type, true);
2444 in_regs = tem != NULL;
2445 }
2446 }
2448 /* If this parameter was passed both in registers and in the stack, use
2449 the copy on the stack. */
2450 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2451 data->passed_type))
2452 entry_parm = 0;
2454 if (entry_parm)
2455 {
2456 int partial;
2458 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2459 data->promoted_mode,
2460 data->passed_type,
2461 data->named_arg);
2462 data->partial = partial;
2464 /* The caller might already have allocated stack space for the
2465 register parameters. */
2466 if (partial != 0 && all->reg_parm_stack_space == 0)
2467 {
2468 /* Part of this argument is passed in registers and part
2469 is passed on the stack. Ask the prologue code to extend
2470 the stack part so that we can recreate the full value.
2472 PRETEND_BYTES is the size of the registers we need to store.
2473 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2474 stack space that the prologue should allocate.
2476 Internally, gcc assumes that the argument pointer is aligned
2477 to STACK_BOUNDARY bits. This is used both for alignment
2478 optimizations (see init_emit) and to locate arguments that are
2479 aligned to more than PARM_BOUNDARY bits. We must preserve this
2480 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2481 a stack boundary. */
2483 /* We assume at most one partial arg, and it must be the first
2484 argument on the stack. */
2485 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2487 pretend_bytes = partial;
2488 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2490 /* We want to align relative to the actual stack pointer, so
2491 don't include this in the stack size until later. */
2492 all->extra_pretend_bytes = all->pretend_args_size;
2493 }
2494 }
2496 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2497 entry_parm ? data->partial : 0, current_function_decl,
2498 &all->stack_args_size, &data->locate);
2500 /* Update parm_stack_boundary if this parameter is passed in the
2501 stack. */
2502 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2503 crtl->parm_stack_boundary = data->locate.boundary;
2505 /* Adjust offsets to include the pretend args. */
2506 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2507 data->locate.slot_offset.constant += pretend_bytes;
2508 data->locate.offset.constant += pretend_bytes;
2510 data->entry_parm = entry_parm;
2511 }
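/* Worked example (illustrative): if the first 7 bytes of the first
   stack argument arrived in registers and STACK_BYTES is 8, then
       pretend_bytes = 7
       all->pretend_args_size = CEIL_ROUND (7, 8)  =>  8
   so the prologue allocates a whole slot into which the register part
   can be stored contiguously with the stack part. */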
2513 /* A subroutine of assign_parms. If there is actually space on the stack
2514 for this parm, count it in stack_args_size and return true. */
2516 static bool
2517 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2518 struct assign_parm_data_one *data)
2519 {
2520 /* Trivially true if we've no incoming register. */
2521 if (data->entry_parm == NULL)
2522 ;
2523 /* Also true if we're partially in registers and partially not,
2524 since we've arranged to drop the entire argument on the stack. */
2525 else if (data->partial != 0)
2526 ;
2527 /* Also true if the target says that it's passed in both registers
2528 and on the stack. */
2529 else if (GET_CODE (data->entry_parm) == PARALLEL
2530 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2531 ;
2532 /* Also true if the target says that there's stack allocated for
2533 all register parameters. */
2534 else if (all->reg_parm_stack_space > 0)
2535 ;
2536 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2537 else
2538 return false;
2540 all->stack_args_size.constant += data->locate.size.constant;
2541 if (data->locate.size.var)
2542 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2544 return true;
2545 }
2547 /* A subroutine of assign_parms. Given that this parameter is allocated
2548 stack space by the ABI, find it. */
2550 static void
2551 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2552 {
2553 rtx offset_rtx, stack_parm;
2554 unsigned int align, boundary;
2556 /* If we're passing this arg using a reg, make its stack home the
2557 aligned stack slot. */
2558 if (data->entry_parm)
2559 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2560 else
2561 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2563 stack_parm = crtl->args.internal_arg_pointer;
2564 if (offset_rtx != const0_rtx)
2565 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2566 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2568 if (!data->passed_pointer)
2569 {
2570 set_mem_attributes (stack_parm, parm, 1);
2571 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2572 while promoted mode's size is needed. */
2573 if (data->promoted_mode != BLKmode
2574 && data->promoted_mode != DECL_MODE (parm))
2575 {
2576 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2577 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2578 {
2579 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2580 data->promoted_mode);
2581 if (offset)
2582 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2583 }
2584 }
2585 }
2587 boundary = data->locate.boundary;
2588 align = BITS_PER_UNIT;
2590 /* If we're padding upward, we know that the alignment of the slot
2591 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2592 intentionally forcing upward padding. Otherwise we have to come
2593 up with a guess at the alignment based on OFFSET_RTX. */
2594 if (data->locate.where_pad != downward || data->entry_parm)
2595 align = boundary;
2596 else if (CONST_INT_P (offset_rtx))
2597 {
2598 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2599 align = align & -align;
2600 }
2601 set_mem_align (stack_parm, align);
2603 if (data->entry_parm)
2604 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2606 data->stack_parm = stack_parm;
2607 }
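/* Worked example (illustrative): with boundary == 32 and
   offset_rtx == (const_int 6), the guess above computes
       align = 6 * BITS_PER_UNIT | 32   =>  48
       align = align & -align           =>  16
   i.e. 16 bits, the largest power of two known to divide the slot's
   offset from the (boundary-aligned) argument pointer. */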
2609 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2610 always valid and contiguous. */
2612 static void
2613 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2614 {
2615 rtx entry_parm = data->entry_parm;
2616 rtx stack_parm = data->stack_parm;
2618 /* If this parm was passed part in regs and part in memory, pretend it
2619 arrived entirely in memory by pushing the register-part onto the stack.
2620 In the special case of a DImode or DFmode that is split, we could put
2621 it together in a pseudoreg directly, but for now that's not worth
2622 bothering with. */
2623 if (data->partial != 0)
2624 {
2625 /* Handle calls that pass values in multiple non-contiguous
2626 locations. The Irix 6 ABI has examples of this. */
2627 if (GET_CODE (entry_parm) == PARALLEL)
2628 emit_group_store (validize_mem (stack_parm), entry_parm,
2629 data->passed_type,
2630 int_size_in_bytes (data->passed_type));
2631 else
2632 {
2633 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2634 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2635 data->partial / UNITS_PER_WORD);
2636 }
2638 entry_parm = stack_parm;
2639 }
2641 /* If we didn't decide this parm came in a register, by default it came
2642 on the stack. */
2643 else if (entry_parm == NULL)
2644 entry_parm = stack_parm;
2646 /* When an argument is passed in multiple locations, we can't make use
2647 of this information, but we can save some copying if the whole argument
2648 is passed in a single register. */
2649 else if (GET_CODE (entry_parm) == PARALLEL
2650 && data->nominal_mode != BLKmode
2651 && data->passed_mode != BLKmode)
2652 {
2653 size_t i, len = XVECLEN (entry_parm, 0);
2655 for (i = 0; i < len; i++)
2656 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2657 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2658 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2659 == data->passed_mode)
2660 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2661 {
2662 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2663 break;
2664 }
2665 }
2667 data->entry_parm = entry_parm;
2668 }
2670 /* A subroutine of assign_parms. Reconstitute any values which were
2671 passed in multiple registers and would fit in a single register. */
2673 static void
2674 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2675 {
2676 rtx entry_parm = data->entry_parm;
2678 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2679 This can be done with register operations rather than on the
2680 stack, even if we will store the reconstituted parameter on the
2681 stack later. */
2682 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2683 {
2684 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2685 emit_group_store (parmreg, entry_parm, data->passed_type,
2686 GET_MODE_SIZE (GET_MODE (entry_parm)));
2687 entry_parm = parmreg;
2688 }
2690 data->entry_parm = entry_parm;
2691 }
2693 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2694 always valid and properly aligned. */
2696 static void
2697 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2698 {
2699 rtx stack_parm = data->stack_parm;
2701 /* If we can't trust the parm stack slot to be aligned enough for its
2702 ultimate type, don't use that slot after entry. We'll make another
2703 stack slot, if we need one. */
2704 if (stack_parm
2705 && ((STRICT_ALIGNMENT
2706 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2707 || (data->nominal_type
2708 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2709 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2710 stack_parm = NULL;
2712 /* If parm was passed in memory, and we need to convert it on entry,
2713 don't store it back in that same slot. */
2714 else if (data->entry_parm == stack_parm
2715 && data->nominal_mode != BLKmode
2716 && data->nominal_mode != data->passed_mode)
2717 stack_parm = NULL;
2719 /* If stack protection is in effect for this function, don't leave any
2720 pointers in their passed stack slots. */
2721 else if (crtl->stack_protect_guard
2722 && (flag_stack_protect == 2
2723 || data->passed_pointer
2724 || POINTER_TYPE_P (data->nominal_type)))
2725 stack_parm = NULL;
2727 data->stack_parm = stack_parm;
2728 }
2730 /* A subroutine of assign_parms. Return true if the current parameter
2731 should be stored as a BLKmode in the current frame. */
2733 static bool
2734 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2735 {
2736 if (data->nominal_mode == BLKmode)
2737 return true;
2738 if (GET_MODE (data->entry_parm) == BLKmode)
2739 return true;
2741 #ifdef BLOCK_REG_PADDING
2742 /* Only assign_parm_setup_block knows how to deal with register arguments
2743 that are padded at the least significant end. */
2744 if (REG_P (data->entry_parm)
2745 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2746 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2747 == (BYTES_BIG_ENDIAN ? upward : downward)))
2748 return true;
2749 #endif
2751 return false;
2752 }
2754 /* A subroutine of assign_parms. Arrange for the parameter to be
2755 present and valid in DATA->STACK_RTL. */
2757 static void
2758 assign_parm_setup_block (struct assign_parm_data_all *all,
2759 tree parm, struct assign_parm_data_one *data)
2760 {
2761 rtx entry_parm = data->entry_parm;
2762 rtx stack_parm = data->stack_parm;
2763 HOST_WIDE_INT size;
2764 HOST_WIDE_INT size_stored;
2766 if (GET_CODE (entry_parm) == PARALLEL)
2767 entry_parm = emit_group_move_into_temps (entry_parm);
2769 size = int_size_in_bytes (data->passed_type);
2770 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2771 if (stack_parm == 0)
2772 {
2773 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2774 stack_parm = assign_stack_local (BLKmode, size_stored,
2775 DECL_ALIGN (parm));
2776 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2777 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2778 set_mem_attributes (stack_parm, parm, 1);
2779 }
2781 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2782 calls that pass values in multiple non-contiguous locations. */
2783 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2784 {
2785 rtx mem;
2787 /* Note that we will be storing an integral number of words.
2788 So we have to be careful to ensure that we allocate an
2789 integral number of words. We do this above when we call
2790 assign_stack_local if space was not allocated in the argument
2791 list. If it was, this will not work if PARM_BOUNDARY is not
2792 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2793 if it becomes a problem. Exception is when BLKmode arrives
2794 with arguments not conforming to word_mode. */
2796 if (data->stack_parm == 0)
2797 ;
2798 else if (GET_CODE (entry_parm) == PARALLEL)
2799 ;
2800 else
2801 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2803 mem = validize_mem (stack_parm);
2805 /* Handle values in multiple non-contiguous locations. */
2806 if (GET_CODE (entry_parm) == PARALLEL)
2807 {
2808 push_to_sequence2 (all->first_conversion_insn,
2809 all->last_conversion_insn);
2810 emit_group_store (mem, entry_parm, data->passed_type, size);
2811 all->first_conversion_insn = get_insns ();
2812 all->last_conversion_insn = get_last_insn ();
2813 end_sequence ();
2814 }
2816 else if (size == 0)
2817 ;
2819 /* If SIZE is that of a mode no bigger than a word, just use
2820 that mode's store operation. */
2821 else if (size <= UNITS_PER_WORD)
2822 {
2823 enum machine_mode mode
2824 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2826 if (mode != BLKmode
2827 #ifdef BLOCK_REG_PADDING
2828 && (size == UNITS_PER_WORD
2829 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2830 != (BYTES_BIG_ENDIAN ? upward : downward)))
2831 #endif
2832 )
2833 {
2834 rtx reg;
2836 /* We are really truncating a word_mode value containing
2837 SIZE bytes into a value of mode MODE. If such an
2838 operation requires no actual instructions, we can refer
2839 to the value directly in mode MODE, otherwise we must
2840 start with the register in word_mode and explicitly
2841 convert it. */
2842 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2843 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2844 else
2845 {
2846 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2847 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2848 }
2849 emit_move_insn (change_address (mem, mode, 0), reg);
2850 }
2852 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2853 machine must be aligned to the left before storing
2854 to memory. Note that the previous test doesn't
2855 handle all cases (e.g. SIZE == 3). */
2856 else if (size != UNITS_PER_WORD
2857 #ifdef BLOCK_REG_PADDING
2858 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2859 == downward)
2860 #else
2861 && BYTES_BIG_ENDIAN
2862 #endif
2863 )
2864 {
2865 rtx tem, x;
2866 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2867 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2869 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2870 tem = change_address (mem, word_mode, 0);
2871 emit_move_insn (tem, x);
2872 }
2873 else
2874 move_block_from_reg (REGNO (entry_parm), mem,
2875 size_stored / UNITS_PER_WORD);
2876 }
2877 else
2878 move_block_from_reg (REGNO (entry_parm), mem,
2879 size_stored / UNITS_PER_WORD);
2880 }
2881 else if (data->stack_parm == 0)
2882 {
2883 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2884 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2885 BLOCK_OP_NORMAL);
2886 all->first_conversion_insn = get_insns ();
2887 all->last_conversion_insn = get_last_insn ();
2888 end_sequence ();
2889 }
2891 data->stack_parm = stack_parm;
2892 SET_DECL_RTL (parm, stack_parm);
2893 }
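/* Worked example (illustrative): storing a 3-byte BLKmode argument
   from a register on a 32-bit BYTES_BIG_ENDIAN target takes the shift
   branch above with
       by = (UNITS_PER_WORD - size) * BITS_PER_UNIT = (4 - 3) * 8 = 8
   so the value is shifted left 8 bits to sit at the most significant
   end of the word before the word_mode store. */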
2895 /* A subroutine of assign_parm_setup_reg, called through note_stores.
2896 This collects sets and clobbers of hard registers in a HARD_REG_SET,
2897 which is pointed to by DATA. */
2898 static void
2899 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2900 {
2901 HARD_REG_SET *pset = (HARD_REG_SET *)data;
2902 if (REG_P (x) && HARD_REGISTER_P (x))
2903 add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
2904 }
2906 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2907 parameter. Get it there. Perform all ABI specified conversions. */
2909 static void
2910 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2911 struct assign_parm_data_one *data)
2912 {
2913 rtx parmreg, validated_mem;
2914 rtx equiv_stack_parm;
2915 enum machine_mode promoted_nominal_mode;
2916 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2917 bool did_conversion = false;
2918 bool need_conversion, moved;
2920 /* Store the parm in a pseudoregister during the function, but we may
2921 need to do it in a wider mode. Using 2 here makes the result
2922 consistent with promote_decl_mode and thus expand_expr_real_1. */
2923 promoted_nominal_mode
2924 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2925 TREE_TYPE (current_function_decl), 2);
2927 parmreg = gen_reg_rtx (promoted_nominal_mode);
2929 if (!DECL_ARTIFICIAL (parm))
2930 mark_user_reg (parmreg);
2932 /* If this was an item that we received a pointer to,
2933 set DECL_RTL appropriately. */
2934 if (data->passed_pointer)
2935 {
2936 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2937 set_mem_attributes (x, parm, 1);
2938 SET_DECL_RTL (parm, x);
2939 }
2940 else
2941 SET_DECL_RTL (parm, parmreg);
2943 assign_parm_remove_parallels (data);
2945 /* Copy the value into the register, thus bridging between
2946 assign_parm_find_data_types and expand_expr_real_1. */
2948 equiv_stack_parm = data->stack_parm;
2949 validated_mem = validize_mem (data->entry_parm);
2951 need_conversion = (data->nominal_mode != data->passed_mode
2952 || promoted_nominal_mode != data->promoted_mode);
2953 moved = false;
2955 if (need_conversion
2956 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2957 && data->nominal_mode == data->passed_mode
2958 && data->nominal_mode == GET_MODE (data->entry_parm))
2959 {
2960 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2961 mode, by the caller. We now have to convert it to
2962 NOMINAL_MODE, if different. However, PARMREG may be in
2963 a different mode than NOMINAL_MODE if it is being stored
2964 promoted.
2966 If ENTRY_PARM is a hard register, it might be in a register
2967 not valid for operating in its mode (e.g., an odd-numbered
2968 register for a DFmode). In that case, moves are the only
2969 thing valid, so we can't do a convert from there. This
2970 occurs when the calling sequence allows such misaligned
2971 usages.
2973 In addition, the conversion may involve a call, which could
2974 clobber parameters which haven't been copied to pseudo
2975 registers yet.
2977 First, we try to emit an insn which performs the necessary
2978 conversion. We verify that this insn does not clobber any
2979 hard registers. */
2981 enum insn_code icode;
2982 rtx op0, op1;
2984 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2985 unsignedp);
2987 op0 = parmreg;
2988 op1 = validated_mem;
2989 if (icode != CODE_FOR_nothing
2990 && insn_operand_matches (icode, 0, op0)
2991 && insn_operand_matches (icode, 1, op1))
2992 {
2993 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
2994 rtx insn, insns;
2995 HARD_REG_SET hardregs;
2997 start_sequence ();
2998 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
2999 data->passed_mode, unsignedp);
3000 emit_insn (insn);
3001 insns = get_insns ();
3003 moved = true;
3004 CLEAR_HARD_REG_SET (hardregs);
3005 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3006 {
3007 if (INSN_P (insn))
3008 note_stores (PATTERN (insn), record_hard_reg_sets,
3009 &hardregs);
3010 if (!hard_reg_set_empty_p (hardregs))
3011 moved = false;
3012 }
3014 end_sequence ();
3016 if (moved)
3017 {
3018 emit_insn (insns);
3019 if (equiv_stack_parm != NULL_RTX)
3020 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3021 equiv_stack_parm);
3022 }
3023 }
3024 }
3026 if (moved)
3027 /* Nothing to do. */
3028 ;
3029 else if (need_conversion)
3030 {
3031 /* We did not have an insn to convert directly, or the sequence
3032 generated appeared unsafe. We must first copy the parm to a
3033 pseudo reg, and save the conversion until after all
3034 parameters have been moved. */
3036 int save_tree_used;
3037 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3039 emit_move_insn (tempreg, validated_mem);
3041 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3042 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3044 if (GET_CODE (tempreg) == SUBREG
3045 && GET_MODE (tempreg) == data->nominal_mode
3046 && REG_P (SUBREG_REG (tempreg))
3047 && data->nominal_mode == data->passed_mode
3048 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3049 && GET_MODE_SIZE (GET_MODE (tempreg))
3050 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3051 {
3052 /* The argument is already sign/zero extended, so note it
3053 into the subreg. */
3054 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3055 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3056 }
3058 /* TREE_USED gets set erroneously during expand_assignment. */
3059 save_tree_used = TREE_USED (parm);
3060 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3061 TREE_USED (parm) = save_tree_used;
3062 all->first_conversion_insn = get_insns ();
3063 all->last_conversion_insn = get_last_insn ();
3064 end_sequence ();
3066 did_conversion = true;
3067 }
3068 else
3069 emit_move_insn (parmreg, validated_mem);
3071 /* If we were passed a pointer but the actual value can safely live
3072 in a register, put it in one. */
3073 if (data->passed_pointer
3074 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3075 /* If by-reference argument was promoted, demote it. */
3076 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3077 || use_register_for_decl (parm)))
3078 {
3079 /* We can't use nominal_mode, because it will have been set to
3080 Pmode above. We must use the actual mode of the parm. */
3081 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3082 mark_user_reg (parmreg);
3084 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3085 {
3086 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3087 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3089 push_to_sequence2 (all->first_conversion_insn,
3090 all->last_conversion_insn);
3091 emit_move_insn (tempreg, DECL_RTL (parm));
3092 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3093 emit_move_insn (parmreg, tempreg);
3094 all->first_conversion_insn = get_insns ();
3095 all->last_conversion_insn = get_last_insn ();
3096 end_sequence ();
3098 did_conversion = true;
3099 }
3100 else
3101 emit_move_insn (parmreg, DECL_RTL (parm));
3103 SET_DECL_RTL (parm, parmreg);
3105 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3106 now the parm. */
3107 data->stack_parm = NULL;
3108 }
3110 /* Mark the register as eliminable if we did no conversion and it was
3111 copied from memory at a fixed offset, and the arg pointer was not
3112 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3113 offset formed an invalid address, such memory-equivalences as we
3114 make here would screw up life analysis for it. */
3115 if (data->nominal_mode == data->passed_mode
3116 && !did_conversion
3117 && data->stack_parm != 0
3118 && MEM_P (data->stack_parm)
3119 && data->locate.offset.var == 0
3120 && reg_mentioned_p (virtual_incoming_args_rtx,
3121 XEXP (data->stack_parm, 0)))
3122 {
3123 rtx linsn = get_last_insn ();
3124 rtx sinsn, set;
3126 /* Mark complex types separately. */
3127 if (GET_CODE (parmreg) == CONCAT)
3128 {
3129 enum machine_mode submode
3130 = GET_MODE_INNER (GET_MODE (parmreg));
3131 int regnor = REGNO (XEXP (parmreg, 0));
3132 int regnoi = REGNO (XEXP (parmreg, 1));
3133 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3134 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3135 GET_MODE_SIZE (submode));
3137 /* Scan backwards for the set of the real and
3138 imaginary parts. */
3139 for (sinsn = linsn; sinsn != 0;
3140 sinsn = prev_nonnote_insn (sinsn))
3141 {
3142 set = single_set (sinsn);
3143 if (set == 0)
3144 continue;
3146 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3147 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3148 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3149 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3150 }
3151 }
3152 else if ((set = single_set (linsn)) != 0
3153 && SET_DEST (set) == parmreg)
3154 set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
3155 }
3157 /* For pointer data type, suggest pointer register. */
3158 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3159 mark_reg_pointer (parmreg,
3160 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3161 }
3163 /* A subroutine of assign_parms. Allocate stack space to hold the current
3164 parameter. Get it there. Perform all ABI specified conversions. */
3166 static void
3167 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3168 struct assign_parm_data_one *data)
3169 {
3170 /* Value must be stored in the stack slot STACK_PARM during function
3171 execution. */
3172 bool to_conversion = false;
3174 assign_parm_remove_parallels (data);
3176 if (data->promoted_mode != data->nominal_mode)
3177 {
3178 /* Conversion is required. */
3179 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3181 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3183 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3184 to_conversion = true;
3186 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3187 TYPE_UNSIGNED (TREE_TYPE (parm)));
3189 if (data->stack_parm)
3190 {
3191 int offset = subreg_lowpart_offset (data->nominal_mode,
3192 GET_MODE (data->stack_parm));
3193 /* ??? This may need a big-endian conversion on sparc64. */
3194 data->stack_parm
3195 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3196 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3197 set_mem_offset (data->stack_parm,
3198 MEM_OFFSET (data->stack_parm) + offset);
3199 }
3200 }
3202 if (data->entry_parm != data->stack_parm)
3203 {
3204 rtx src, dest;
3206 if (data->stack_parm == 0)
3207 {
3208 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3209 GET_MODE (data->entry_parm),
3210 TYPE_ALIGN (data->passed_type));
3211 data->stack_parm
3212 = assign_stack_local (GET_MODE (data->entry_parm),
3213 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3214 align);
3215 set_mem_attributes (data->stack_parm, parm, 1);
3216 }
3218 dest = validize_mem (data->stack_parm);
3219 src = validize_mem (data->entry_parm);
3221 if (MEM_P (src))
3222 {
3223 /* Use a block move to handle potentially misaligned entry_parm. */
3224 if (!to_conversion)
3225 push_to_sequence2 (all->first_conversion_insn,
3226 all->last_conversion_insn);
3227 to_conversion = true;
3229 emit_block_move (dest, src,
3230 GEN_INT (int_size_in_bytes (data->passed_type)),
3231 BLOCK_OP_NORMAL);
3232 }
3233 else
3234 emit_move_insn (dest, src);
3235 }
3237 if (to_conversion)
3238 {
3239 all->first_conversion_insn = get_insns ();
3240 all->last_conversion_insn = get_last_insn ();
3241 end_sequence ();
3242 }
3244 SET_DECL_RTL (parm, data->stack_parm);
3245 }
3247 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3248 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3250 static void
3251 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3252 VEC(tree, heap) *fnargs)
3253 {
3254 tree parm;
3255 tree orig_fnargs = all->orig_fnargs;
3256 unsigned i = 0;
3258 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3259 {
3260 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3261 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3262 {
3263 rtx tmp, real, imag;
3264 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3266 real = DECL_RTL (VEC_index (tree, fnargs, i));
3267 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
3268 if (inner != GET_MODE (real))
3269 {
3270 real = gen_lowpart_SUBREG (inner, real);
3271 imag = gen_lowpart_SUBREG (inner, imag);
3272 }
3274 if (TREE_ADDRESSABLE (parm))
3275 {
3276 rtx rmem, imem;
3277 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3278 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3279 DECL_MODE (parm),
3280 TYPE_ALIGN (TREE_TYPE (parm)));
3282 /* split_complex_arg put the real and imag parts in
3283 pseudos. Move them to memory. */
3284 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3285 set_mem_attributes (tmp, parm, 1);
3286 rmem = adjust_address_nv (tmp, inner, 0);
3287 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3288 push_to_sequence2 (all->first_conversion_insn,
3289 all->last_conversion_insn);
3290 emit_move_insn (rmem, real);
3291 emit_move_insn (imem, imag);
3292 all->first_conversion_insn = get_insns ();
3293 all->last_conversion_insn = get_last_insn ();
3294 end_sequence ();
3295 }
3296 else
3297 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3298 SET_DECL_RTL (parm, tmp);
3300 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3301 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
3302 if (inner != GET_MODE (real))
3303 {
3304 real = gen_lowpart_SUBREG (inner, real);
3305 imag = gen_lowpart_SUBREG (inner, imag);
3306 }
3307 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3308 set_decl_incoming_rtl (parm, tmp, false);
3309 i++;
3310 }
3311 }
3312 }
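/* Example (illustrative): for a non-addressable `_Complex float'
   parameter whose halves landed in pseudos R and I, the code above
   simply sets DECL_RTL to (concat:SC R I), whereas an addressable
   parameter gets a real stack slot with the two halves stored back
   into adjacent memory. */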
3314 /* Assign RTL expressions to the function's parameters. This may involve
3315 copying them into registers and using those registers as the DECL_RTL. */
3317 static void
3318 assign_parms (tree fndecl)
3319 {
3320 struct assign_parm_data_all all;
3321 tree parm;
3322 VEC(tree, heap) *fnargs;
3323 unsigned i;
3325 crtl->args.internal_arg_pointer
3326 = targetm.calls.internal_arg_pointer ();
3328 assign_parms_initialize_all (&all);
3329 fnargs = assign_parms_augmented_arg_list (&all);
3331 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3332 {
3333 struct assign_parm_data_one data;
3335 /* Extract the type of PARM; adjust it according to ABI. */
3336 assign_parm_find_data_types (&all, parm, &data);
3338 /* Early out for errors and void parameters. */
3339 if (data.passed_mode == VOIDmode)
3340 {
3341 SET_DECL_RTL (parm, const0_rtx);
3342 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3343 continue;
3344 }
3346 /* Estimate stack alignment from parameter alignment. */
3347 if (SUPPORTS_STACK_ALIGNMENT)
3348 {
3349 unsigned int align
3350 = targetm.calls.function_arg_boundary (data.promoted_mode,
3351 data.passed_type);
3352 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3353 align);
3354 if (TYPE_ALIGN (data.nominal_type) > align)
3355 align = MINIMUM_ALIGNMENT (data.nominal_type,
3356 TYPE_MODE (data.nominal_type),
3357 TYPE_ALIGN (data.nominal_type));
3358 if (crtl->stack_alignment_estimated < align)
3359 {
3360 gcc_assert (!crtl->stack_realign_processed);
3361 crtl->stack_alignment_estimated = align;
3362 }
3363 }
3365 if (cfun->stdarg && !DECL_CHAIN (parm))
3366 assign_parms_setup_varargs (&all, &data, false);
3368 /* Find out where the parameter arrives in this function. */
3369 assign_parm_find_entry_rtl (&all, &data);
3371 /* Find out where stack space for this parameter might be. */
3372 if (assign_parm_is_stack_parm (&all, &data))
3373 {
3374 assign_parm_find_stack_rtl (parm, &data);
3375 assign_parm_adjust_entry_rtl (&data);
3376 }
3378 /* Record permanently how this parm was passed. */
3379 if (data.passed_pointer)
3380 {
3381 rtx incoming_rtl
3382 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3383 data.entry_parm);
3384 set_decl_incoming_rtl (parm, incoming_rtl, true);
3385 }
3386 else
3387 set_decl_incoming_rtl (parm, data.entry_parm, false);
3389 /* Update info on where next arg arrives in registers. */
3390 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3391 data.passed_type, data.named_arg);
3393 assign_parm_adjust_stack_rtl (&data);
3395 if (assign_parm_setup_block_p (&data))
3396 assign_parm_setup_block (&all, parm, &data);
3397 else if (data.passed_pointer || use_register_for_decl (parm))
3398 assign_parm_setup_reg (&all, parm, &data);
3399 else
3400 assign_parm_setup_stack (&all, parm, &data);
3401 }
3403 if (targetm.calls.split_complex_arg)
3404 assign_parms_unsplit_complex (&all, fnargs);
3406 VEC_free (tree, heap, fnargs);
3408 /* Output all parameter conversion instructions (possibly including calls)
3409 now that all parameters have been copied out of hard registers. */
3410 emit_insn (all.first_conversion_insn);
3412 /* Estimate reload stack alignment from scalar return mode. */
3413 if (SUPPORTS_STACK_ALIGNMENT)
3414 {
3415 if (DECL_RESULT (fndecl))
3416 {
3417 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3418 enum machine_mode mode = TYPE_MODE (type);
3420 if (mode != BLKmode
3421 && mode != VOIDmode
3422 && !AGGREGATE_TYPE_P (type))
3423 {
3424 unsigned int align = GET_MODE_ALIGNMENT (mode);
3425 if (crtl->stack_alignment_estimated < align)
3426 {
3427 gcc_assert (!crtl->stack_realign_processed);
3428 crtl->stack_alignment_estimated = align;
3429 }
3430 }
3431 }
3432 }
3434 /* If we are receiving a struct value address as the first argument, set up
3435 the RTL for the function result. As this might require code to convert
3436 the transmitted address to Pmode, we do this here to ensure that possible
3437 preliminary conversions of the address have been emitted already. */
3438 if (all.function_result_decl)
3439 {
3440 tree result = DECL_RESULT (current_function_decl);
3441 rtx addr = DECL_RTL (all.function_result_decl);
3442 rtx x;
3444 if (DECL_BY_REFERENCE (result))
3445 {
3446 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3447 x = addr;
3448 }
3449 else
3450 {
3451 SET_DECL_VALUE_EXPR (result,
3452 build1 (INDIRECT_REF, TREE_TYPE (result),
3453 all.function_result_decl));
3454 addr = convert_memory_address (Pmode, addr);
3455 x = gen_rtx_MEM (DECL_MODE (result), addr);
3456 set_mem_attributes (x, result, 1);
3457 }
3459 DECL_HAS_VALUE_EXPR_P (result) = 1;
3461 SET_DECL_RTL (result, x);
3462 }
3464 /* We have aligned all the args, so add space for the pretend args. */
3465 crtl->args.pretend_args_size = all.pretend_args_size;
3466 all.stack_args_size.constant += all.extra_pretend_bytes;
3467 crtl->args.size = all.stack_args_size.constant;
3469 /* Adjust function incoming argument size for alignment and
3470 minimum length. */
3472 #ifdef REG_PARM_STACK_SPACE
3473 crtl->args.size = MAX (crtl->args.size,
3474 REG_PARM_STACK_SPACE (fndecl));
3475 #endif
3477 crtl->args.size = CEIL_ROUND (crtl->args.size,
3478 PARM_BOUNDARY / BITS_PER_UNIT);
3480 #ifdef ARGS_GROW_DOWNWARD
3481 crtl->args.arg_offset_rtx
3482 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3483 : expand_expr (size_diffop (all.stack_args_size.var,
3484 size_int (-all.stack_args_size.constant)),
3485 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3486 #else
3487 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3488 #endif
3490 /* See how many bytes, if any, of its args a function should try to pop
3491 on return. */
3493 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3494 TREE_TYPE (fndecl),
3495 crtl->args.size);
3497 /* For stdarg.h functions, save info about
3498 regs and stack space used by the named args. */
3500 crtl->args.info = all.args_so_far_v;
3502 /* Set the rtx used for the function return value. Put this in its
3503 own variable so any optimizers that need this information don't have
3504 to include tree.h. Do this here so it gets done when an inlined
3505 function gets output. */
3507 crtl->return_rtx
3508 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3509 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3511 /* If scalar return value was computed in a pseudo-reg, or was a named
3512 return value that got dumped to the stack, copy that to the hard
3513 return register. */
3514 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3515 {
3516 tree decl_result = DECL_RESULT (fndecl);
3517 rtx decl_rtl = DECL_RTL (decl_result);
3519 if (REG_P (decl_rtl)
3520 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3521 : DECL_REGISTER (decl_result))
3522 {
3523 rtx real_decl_rtl;
3525 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3526 fndecl, true);
3527 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3528 /* The delay slot scheduler assumes that crtl->return_rtx
3529 holds the hard register containing the return value, not a
3530 temporary pseudo. */
3531 crtl->return_rtx = real_decl_rtl;
3532 }
3533 }
3534 }
3536 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3537 For all seen types, gimplify their sizes. */
3539 static tree
3540 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3541 {
3542 tree t = *tp;
3544 *walk_subtrees = 0;
3545 if (TYPE_P (t))
3546 {
3547 if (POINTER_TYPE_P (t))
3548 *walk_subtrees = 1;
3549 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3550 && !TYPE_SIZES_GIMPLIFIED (t))
3551 {
3552 gimplify_type_sizes (t, (gimple_seq *) data);
3553 *walk_subtrees = 1;
3554 }
3555 }
3557 return NULL;
3558 }
3560 /* Gimplify the parameter list for current_function_decl. This involves
3561 evaluating SAVE_EXPRs of variable sized parameters and generating code
3562 to implement callee-copies reference parameters. Returns a sequence of
3563 statements to add to the beginning of the function. */
3565 gimple_seq
3566 gimplify_parameters (void)
3567 {
3568 struct assign_parm_data_all all;
3569 tree parm;
3570 gimple_seq stmts = NULL;
3571 VEC(tree, heap) *fnargs;
3572 unsigned i;
3574 assign_parms_initialize_all (&all);
3575 fnargs = assign_parms_augmented_arg_list (&all);
3577 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3578 {
3579 struct assign_parm_data_one data;
3581 /* Extract the type of PARM; adjust it according to ABI. */
3582 assign_parm_find_data_types (&all, parm, &data);
3584 /* Early out for errors and void parameters. */
3585 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3586 continue;
3588 /* Update info on where next arg arrives in registers. */
3589 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3590 data.passed_type, data.named_arg);
3592 /* ??? Once upon a time variable_size stuffed parameter list
3593 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3594 turned out to be less than manageable in the gimple world.
3595 Now we have to hunt them down ourselves. */
3596 walk_tree_without_duplicates (&data.passed_type,
3597 gimplify_parm_type, &stmts);
3599 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3600 {
3601 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3602 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3603 }
3605 if (data.passed_pointer)
3606 {
3607 tree type = TREE_TYPE (data.passed_type);
3608 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3609 type, data.named_arg))
3610 {
3611 tree local, t;
3613 /* For constant-sized objects, this is trivial; for
3614 variable-sized objects, we have to play games. */
3615 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3616 && !(flag_stack_check == GENERIC_STACK_CHECK
3617 && compare_tree_int (DECL_SIZE_UNIT (parm),
3618 STACK_CHECK_MAX_VAR_SIZE) > 0))
3619 {
3620 local = create_tmp_reg (type, get_name (parm));
3621 DECL_IGNORED_P (local) = 0;
3622 /* If PARM was addressable, move that flag over
3623 to the local copy, as its address will be taken,
3624 not the PARM's. Keep the PARM marked addressable as
3625 well, since we'll query that flag during gimplification. */
3626 if (TREE_ADDRESSABLE (parm))
3627 TREE_ADDRESSABLE (local) = 1;
3628 }
3629 else
3630 {
3631 tree ptr_type, addr;
3633 ptr_type = build_pointer_type (type);
3634 addr = create_tmp_reg (ptr_type, get_name (parm));
3635 DECL_IGNORED_P (addr) = 0;
3636 local = build_fold_indirect_ref (addr);
3638 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3639 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3640 size_int (DECL_ALIGN (parm)));
3642 /* The call has been built for a variable-sized object. */
3643 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3644 t = fold_convert (ptr_type, t);
3645 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3646 gimplify_and_add (t, &stmts);
3647 }
3649 gimplify_assign (local, parm, &stmts);
3651 SET_DECL_VALUE_EXPR (parm, local);
3652 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3653 }
3654 }
3655 }
3657 VEC_free (tree, heap, fnargs);
3659 return stmts;
3660 }
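/* Example (illustrative): for a variable-sized aggregate that the
   target reports as callee-copied, the code above builds, roughly,
       addr = __builtin_alloca_with_align (DECL_SIZE_UNIT (parm),
                                           DECL_ALIGN (parm));
       *addr = <incoming parameter>;
   and installs the dereference of `addr' as the parameter's
   DECL_VALUE_EXPR (`addr' is an illustrative name only). */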
3662 /* Compute the size and offset from the start of the stacked arguments for a
3663 parm passed in mode PASSED_MODE and with type TYPE.
3665 INITIAL_OFFSET_PTR points to the current offset into the stacked
3666 arguments.
3668 The starting offset and size for this parm are returned in
3669 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3670 nonzero, the offset is that of the stack slot, which is returned in
3671 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3672 padding required from the initial offset ptr to the stack slot.
3674 IN_REGS is nonzero if the argument will be passed in registers. It will
3675 never be set if REG_PARM_STACK_SPACE is not defined.
3677 FNDECL is the function in which the argument was defined.
3679 There are two types of rounding that are done. The first, controlled by
3680 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3681 argument list to be aligned to the specified boundary (in bits). This
3682 rounding affects the initial and starting offsets, but not the argument
3683 size.
3685 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3686 optionally rounds the size of the parm to PARM_BOUNDARY. The
3687 initial offset is not affected by this rounding, while the size always
3688 is and the starting offset may be. */
3690 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3691 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3692 callers pass in the total size of args so far as
3693 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
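/* As a worked example of the first rounding, suppose a hypothetical
   target's function_arg_boundary hook returns 64 (bits) for this
   argument and the incoming offset is 13 bytes: the slot offset is
   padded to CEIL_ROUND (13, 8) == 16, while the argument's own size
   is untouched.  The second rounding instead pads the size, e.g. a
   5-byte BLKmode argument occupies 8 bytes when PARM_BOUNDARY is 64.  */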
3695 void
3696 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3697 int partial, tree fndecl ATTRIBUTE_UNUSED,
3698 struct args_size *initial_offset_ptr,
3699 struct locate_and_pad_arg_data *locate)
3701 tree sizetree;
3702 enum direction where_pad;
3703 unsigned int boundary, round_boundary;
3704 int reg_parm_stack_space = 0;
3705 int part_size_in_regs;
3707 #ifdef REG_PARM_STACK_SPACE
3708 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3710 /* If we have found a stack parm before we reach the end of the
3711 area reserved for registers, skip that area. */
3712 if (! in_regs)
3714 if (reg_parm_stack_space > 0)
3716 if (initial_offset_ptr->var)
3718 initial_offset_ptr->var
3719 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3720 ssize_int (reg_parm_stack_space));
3721 initial_offset_ptr->constant = 0;
3723 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3724 initial_offset_ptr->constant = reg_parm_stack_space;
3727 #endif /* REG_PARM_STACK_SPACE */
3729 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3731 sizetree
3732 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3733 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3734 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3735 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3736 type);
3737 locate->where_pad = where_pad;
3739 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3740 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3741 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3743 locate->boundary = boundary;
3745 if (SUPPORTS_STACK_ALIGNMENT)
3747 /* stack_alignment_estimated can't change after stack has been
3748 realigned. */
3749 if (crtl->stack_alignment_estimated < boundary)
3751 if (!crtl->stack_realign_processed)
3752 crtl->stack_alignment_estimated = boundary;
3753 else
3755 /* If stack is realigned and stack alignment value
3756 hasn't been finalized, it is OK not to increase
3757 stack_alignment_estimated. The bigger alignment
3758 requirement is recorded in stack_alignment_needed
3759 below. */
3760 gcc_assert (!crtl->stack_realign_finalized
3761 && crtl->stack_realign_needed);
3766 /* Remember if the outgoing parameter requires extra alignment on the
3767 calling function side. */
3768 if (crtl->stack_alignment_needed < boundary)
3769 crtl->stack_alignment_needed = boundary;
3770 if (crtl->preferred_stack_boundary < boundary)
3771 crtl->preferred_stack_boundary = boundary;
3773 #ifdef ARGS_GROW_DOWNWARD
3774 locate->slot_offset.constant = -initial_offset_ptr->constant;
3775 if (initial_offset_ptr->var)
3776 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3777 initial_offset_ptr->var);
3780 tree s2 = sizetree;
3781 if (where_pad != none
3782 && (!host_integerp (sizetree, 1)
3783 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3784 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3785 SUB_PARM_SIZE (locate->slot_offset, s2);
3788 locate->slot_offset.constant += part_size_in_regs;
3790 if (!in_regs
3791 #ifdef REG_PARM_STACK_SPACE
3792 || REG_PARM_STACK_SPACE (fndecl) > 0
3793 #endif
3795 pad_to_arg_alignment (&locate->slot_offset, boundary,
3796 &locate->alignment_pad);
3798 locate->size.constant = (-initial_offset_ptr->constant
3799 - locate->slot_offset.constant);
3800 if (initial_offset_ptr->var)
3801 locate->size.var = size_binop (MINUS_EXPR,
3802 size_binop (MINUS_EXPR,
3803 ssize_int (0),
3804 initial_offset_ptr->var),
3805 locate->slot_offset.var);
3807 /* Pad_below needs the pre-rounded size to know how much to pad
3808 below. */
3809 locate->offset = locate->slot_offset;
3810 if (where_pad == downward)
3811 pad_below (&locate->offset, passed_mode, sizetree);
3813 #else /* !ARGS_GROW_DOWNWARD */
3814 if (!in_regs
3815 #ifdef REG_PARM_STACK_SPACE
3816 || REG_PARM_STACK_SPACE (fndecl) > 0
3817 #endif
3819 pad_to_arg_alignment (initial_offset_ptr, boundary,
3820 &locate->alignment_pad);
3821 locate->slot_offset = *initial_offset_ptr;
3823 #ifdef PUSH_ROUNDING
3824 if (passed_mode != BLKmode)
3825 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3826 #endif
3828 /* Pad_below needs the pre-rounded size to know how much to pad below
3829 so this must be done before rounding up. */
3830 locate->offset = locate->slot_offset;
3831 if (where_pad == downward)
3832 pad_below (&locate->offset, passed_mode, sizetree);
3834 if (where_pad != none
3835 && (!host_integerp (sizetree, 1)
3836 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3837 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3839 ADD_PARM_SIZE (locate->size, sizetree);
3841 locate->size.constant -= part_size_in_regs;
3842 #endif /* ARGS_GROW_DOWNWARD */
3844 #ifdef FUNCTION_ARG_OFFSET
3845 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3846 #endif
3849 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3850 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3852 static void
3853 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3854 struct args_size *alignment_pad)
3856 tree save_var = NULL_TREE;
3857 HOST_WIDE_INT save_constant = 0;
3858 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3859 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3861 #ifdef SPARC_STACK_BOUNDARY_HACK
3862 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3863 the real alignment of %sp. However, when it does this, the
3864 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3865 if (SPARC_STACK_BOUNDARY_HACK)
3866 sp_offset = 0;
3867 #endif
3869 if (boundary > PARM_BOUNDARY)
3871 save_var = offset_ptr->var;
3872 save_constant = offset_ptr->constant;
3875 alignment_pad->var = NULL_TREE;
3876 alignment_pad->constant = 0;
3878 if (boundary > BITS_PER_UNIT)
3880 if (offset_ptr->var)
3882 tree sp_offset_tree = ssize_int (sp_offset);
3883 tree offset = size_binop (PLUS_EXPR,
3884 ARGS_SIZE_TREE (*offset_ptr),
3885 sp_offset_tree);
3886 #ifdef ARGS_GROW_DOWNWARD
3887 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3888 #else
3889 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3890 #endif
3892 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3893 /* ARGS_SIZE_TREE includes constant term. */
3894 offset_ptr->constant = 0;
3895 if (boundary > PARM_BOUNDARY)
3896 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3897 save_var);
3899 else
3901 offset_ptr->constant = -sp_offset +
3902 #ifdef ARGS_GROW_DOWNWARD
3903 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3904 #else
3905 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3906 #endif
3907 if (boundary > PARM_BOUNDARY)
3908 alignment_pad->constant = offset_ptr->constant - save_constant;
3913 static void
3914 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3916 if (passed_mode != BLKmode)
3918 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3919 offset_ptr->constant
3920 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3921 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3922 - GET_MODE_SIZE (passed_mode));
3924 else
3926 if (TREE_CODE (sizetree) != INTEGER_CST
3927 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3929 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3930 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3931 /* Add it in. */
3932 ADD_PARM_SIZE (*offset_ptr, s2);
3933 SUB_PARM_SIZE (*offset_ptr, sizetree);
3939 /* True if register REGNO was alive at a place where `setjmp' was
3940 called and was set more than once or is an argument. Such regs may
3941 be clobbered by `longjmp'. */
3943 static bool
3944 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3946 /* There appear to be cases where some local vars never reach the
3947 backend but have bogus regnos. */
3948 if (regno >= max_reg_num ())
3949 return false;
3951 return ((REG_N_SETS (regno) > 1
3952 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3953 && REGNO_REG_SET_P (setjmp_crosses, regno));
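/* For intuition, consider (USE is hypothetical):

     jmp_buf buf;
     int v = 0;
     if (setjmp (buf) == 0)
       {
         v = 1;
         longjmp (buf, 1);
       }
     use (v);

   If V lives in a register and is modified between the setjmp and the
   longjmp, its value at the second return of setjmp is indeterminate
   (C99 7.13.2.1), which is what the warnings below diagnose.  */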
3956 /* Walk the tree of blocks describing the binding levels within a
3957 function and warn about variables that might be killed by setjmp or
3958 vfork. This is done after flow analysis and before register
3959 allocation, since register allocation will replace the pseudo-regs
3960 with hard regs. */
3962 static void
3963 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3965 tree decl, sub;
3967 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3969 if (TREE_CODE (decl) == VAR_DECL
3970 && DECL_RTL_SET_P (decl)
3971 && REG_P (DECL_RTL (decl))
3972 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3973 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3974 " %<longjmp%> or %<vfork%>", decl);
3977 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3978 setjmp_vars_warning (setjmp_crosses, sub);
3981 /* Do the appropriate part of setjmp_vars_warning
3982 but for arguments instead of local variables. */
3984 static void
3985 setjmp_args_warning (bitmap setjmp_crosses)
3987 tree decl;
3988 for (decl = DECL_ARGUMENTS (current_function_decl);
3989 decl; decl = DECL_CHAIN (decl))
3990 if (DECL_RTL (decl) != 0
3991 && REG_P (DECL_RTL (decl))
3992 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3993 warning (OPT_Wclobbered,
3994 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3995 decl);
3998 /* Generate warning messages for variables live across setjmp. */
4000 void
4001 generate_setjmp_warnings (void)
4003 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4005 if (n_basic_blocks == NUM_FIXED_BLOCKS
4006 || bitmap_empty_p (setjmp_crosses))
4007 return;
4009 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4010 setjmp_args_warning (setjmp_crosses);
4014 /* Reverse the order of elements in the fragment chain T of blocks,
4015 and return the new head of the chain (old last element). */
4017 static tree
4018 block_fragments_nreverse (tree t)
4020 tree prev = 0, block, next;
4021 for (block = t; block; block = next)
4023 next = BLOCK_FRAGMENT_CHAIN (block);
4024 BLOCK_FRAGMENT_CHAIN (block) = prev;
4025 prev = block;
4027 return prev;
4030 /* Reverse the order of elements in the chain T of blocks,
4031 and return the new head of the chain (old last element).
4032 Also do the same on subblocks and reverse the order of elements
4033 in BLOCK_FRAGMENT_CHAIN as well. */
4035 static tree
4036 blocks_nreverse_all (tree t)
4038 tree prev = 0, block, next;
4039 for (block = t; block; block = next)
4041 next = BLOCK_CHAIN (block);
4042 BLOCK_CHAIN (block) = prev;
4043 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4044 if (BLOCK_FRAGMENT_CHAIN (block)
4045 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4046 BLOCK_FRAGMENT_CHAIN (block)
4047 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4048 prev = block;
4050 return prev;
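/* E.g. a chain A -> B -> C (linked through BLOCK_CHAIN) comes back as
   C -> B -> A, with each node's subblock chain, and any fragment chain
   rooted at an origin block, reversed in the same way.  */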
4054 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4055 and create duplicate blocks. */
4056 /* ??? Need an option to either create block fragments or to create
4057 abstract origin duplicates of a source block. It really depends
4058 on what optimization has been performed. */
4060 void
4061 reorder_blocks (void)
4063 tree block = DECL_INITIAL (current_function_decl);
4064 VEC(tree,heap) *block_stack;
4066 if (block == NULL_TREE)
4067 return;
4069 block_stack = VEC_alloc (tree, heap, 10);
4071 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4072 clear_block_marks (block);
4074 /* Prune the old trees away, so that they don't get in the way. */
4075 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4076 BLOCK_CHAIN (block) = NULL_TREE;
4078 /* Recreate the block tree from the note nesting. */
4079 reorder_blocks_1 (get_insns (), block, &block_stack);
4080 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4082 VEC_free (tree, heap, block_stack);
4085 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4087 void
4088 clear_block_marks (tree block)
4090 while (block)
4092 TREE_ASM_WRITTEN (block) = 0;
4093 clear_block_marks (BLOCK_SUBBLOCKS (block));
4094 block = BLOCK_CHAIN (block);
4098 static void
4099 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
4101 rtx insn;
4103 for (insn = insns; insn; insn = NEXT_INSN (insn))
4105 if (NOTE_P (insn))
4107 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4109 tree block = NOTE_BLOCK (insn);
4110 tree origin;
4112 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4113 origin = block;
4115 /* If we have seen this block before, that means it now
4116 spans multiple address regions. Create a new fragment. */
4117 if (TREE_ASM_WRITTEN (block))
4119 tree new_block = copy_node (block);
4121 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4122 BLOCK_FRAGMENT_CHAIN (new_block)
4123 = BLOCK_FRAGMENT_CHAIN (origin);
4124 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4126 NOTE_BLOCK (insn) = new_block;
4127 block = new_block;
4130 BLOCK_SUBBLOCKS (block) = 0;
4131 TREE_ASM_WRITTEN (block) = 1;
4132 /* When there's only one block for the entire function,
4133 current_block == block and we mustn't do this, as it
4134 would cause infinite recursion. */
4135 if (block != current_block)
4137 if (block != origin)
4138 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
4140 BLOCK_SUPERCONTEXT (block) = current_block;
4141 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4142 BLOCK_SUBBLOCKS (current_block) = block;
4143 current_block = origin;
4145 VEC_safe_push (tree, heap, *p_block_stack, block);
4147 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4149 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
4150 current_block = BLOCK_SUPERCONTEXT (current_block);
4156 /* Reverse the order of elements in the chain T of blocks,
4157 and return the new head of the chain (old last element). */
4159 tree
4160 blocks_nreverse (tree t)
4162 tree prev = 0, block, next;
4163 for (block = t; block; block = next)
4165 next = BLOCK_CHAIN (block);
4166 BLOCK_CHAIN (block) = prev;
4167 prev = block;
4169 return prev;
4172 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4173 by modifying the last node in chain 1 to point to chain 2. */
4175 tree
4176 block_chainon (tree op1, tree op2)
4178 tree t1;
4180 if (!op1)
4181 return op2;
4182 if (!op2)
4183 return op1;
4185 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4186 continue;
4187 BLOCK_CHAIN (t1) = op2;
4189 #ifdef ENABLE_TREE_CHECKING
4191 tree t2;
4192 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4193 gcc_assert (t2 != t1);
4195 #endif
4197 return op1;
4200 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4201 non-NULL, list them all into VECTOR, in a depth-first preorder
4202 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4203 blocks. */
4205 static int
4206 all_blocks (tree block, tree *vector)
4208 int n_blocks = 0;
4210 while (block)
4212 TREE_ASM_WRITTEN (block) = 0;
4214 /* Record this block. */
4215 if (vector)
4216 vector[n_blocks] = block;
4218 ++n_blocks;
4220 /* Record the subblocks, and their subblocks... */
4221 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4222 vector ? vector + n_blocks : 0);
4223 block = BLOCK_CHAIN (block);
4226 return n_blocks;
4229 /* Return a vector containing all the blocks rooted at BLOCK. The
4230 number of elements in the vector is stored in N_BLOCKS_P. The
4231 vector is dynamically allocated; it is the caller's responsibility
4232 to call `free' on the pointer returned. */
4234 static tree *
4235 get_block_vector (tree block, int *n_blocks_p)
4237 tree *block_vector;
4239 *n_blocks_p = all_blocks (block, NULL);
4240 block_vector = XNEWVEC (tree, *n_blocks_p);
4241 all_blocks (block, block_vector);
4243 return block_vector;
4246 static GTY(()) int next_block_index = 2;
4248 /* Set BLOCK_NUMBER for all the blocks in FN. */
4250 void
4251 number_blocks (tree fn)
4253 int i;
4254 int n_blocks;
4255 tree *block_vector;
4257 /* For SDB and XCOFF debugging output, we start numbering the blocks
4258 from 1 within each function, rather than keeping a running
4259 count. */
4260 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4261 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4262 next_block_index = 1;
4263 #endif
4265 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4267 /* The top-level BLOCK isn't numbered at all. */
4268 for (i = 1; i < n_blocks; ++i)
4269 /* We number the blocks from two. */
4270 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4272 free (block_vector);
4274 return;
4277 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4279 DEBUG_FUNCTION tree
4280 debug_find_var_in_block_tree (tree var, tree block)
4282 tree t;
4284 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4285 if (t == var)
4286 return block;
4288 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4290 tree ret = debug_find_var_in_block_tree (var, t);
4291 if (ret)
4292 return ret;
4295 return NULL_TREE;
4298 /* Keep track of whether we're in a dummy function context. If we are,
4299 we don't want to invoke the set_current_function hook, because we'll
4300 get into trouble if the hook calls target_reinit () recursively or
4301 when the initial initialization is not yet complete. */
4303 static bool in_dummy_function;
4305 /* Invoke the target hook when setting cfun. Update the optimization options
4306 if the function uses different options than the default. */
4308 static void
4309 invoke_set_current_function_hook (tree fndecl)
4311 if (!in_dummy_function)
4313 tree opts = ((fndecl)
4314 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4315 : optimization_default_node);
4317 if (!opts)
4318 opts = optimization_default_node;
4320 /* Change optimization options if needed. */
4321 if (optimization_current_node != opts)
4323 optimization_current_node = opts;
4324 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4327 targetm.set_current_function (fndecl);
4331 /* cfun should never be set directly; use this function. */
4333 void
4334 set_cfun (struct function *new_cfun)
4336 if (cfun != new_cfun)
4338 cfun = new_cfun;
4339 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4343 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4345 static VEC(function_p,heap) *cfun_stack;
4347 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4349 void
4350 push_cfun (struct function *new_cfun)
4352 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4353 set_cfun (new_cfun);
4356 /* Pop cfun from the stack. */
4358 void
4359 pop_cfun (void)
4361 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4362 set_cfun (new_cfun);
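/* A minimal sketch of the push/pop discipline above (not built;
   PROCESS_BODY is a hypothetical callback): run some processing with
   FNDECL's function context temporarily installed as cfun, then
   restore the previous context.  */
#if 0
static void
with_function_context (tree fndecl, void (*process_body) (void))
{
  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  process_body ();
  pop_cfun ();
}
#endif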
4365 /* Return the current funcdef number, and increment it. */
4366 int
4367 get_next_funcdef_no (void)
4369 return funcdef_no++;
4372 /* Return the current funcdef number. */
4373 int
4374 get_last_funcdef_no (void)
4376 return funcdef_no;
4379 /* Allocate a function structure for FNDECL and set its contents
4380 to the defaults. Set cfun to the newly-allocated object.
4381 Some of the helper functions invoked during initialization assume
4382 that cfun has already been set. Therefore, assign the new object
4383 directly into cfun and invoke the back end hook explicitly at the
4384 very end, rather than initializing a temporary and calling set_cfun
4385 on it.
4387 ABSTRACT_P is true if this is a function that will never be seen by
4388 the middle-end. Such functions are front-end concepts (like C++
4389 function templates) that do not correspond directly to functions
4390 placed in object files. */
4392 void
4393 allocate_struct_function (tree fndecl, bool abstract_p)
4395 tree result;
4396 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4398 cfun = ggc_alloc_cleared_function ();
4400 init_eh_for_function ();
4402 if (init_machine_status)
4403 cfun->machine = (*init_machine_status) ();
4405 #ifdef OVERRIDE_ABI_FORMAT
4406 OVERRIDE_ABI_FORMAT (fndecl);
4407 #endif
4409 invoke_set_current_function_hook (fndecl);
4411 if (fndecl != NULL_TREE)
4413 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4414 cfun->decl = fndecl;
4415 current_function_funcdef_no = get_next_funcdef_no ();
4417 result = DECL_RESULT (fndecl);
4418 if (!abstract_p && aggregate_value_p (result, fndecl))
4420 #ifdef PCC_STATIC_STRUCT_RETURN
4421 cfun->returns_pcc_struct = 1;
4422 #endif
4423 cfun->returns_struct = 1;
4426 cfun->stdarg = stdarg_p (fntype);
4428 /* Assume all registers in stdarg functions need to be saved. */
4429 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4430 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4432 /* ??? This could be set on a per-function basis by the front-end
4433 but is this worth the hassle? */
4434 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4438 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4439 instead of just setting it. */
4441 void
4442 push_struct_function (tree fndecl)
4444 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4445 allocate_struct_function (fndecl, false);
4448 /* Reset crtl and other non-struct-function variables to defaults as
4449 appropriate for emitting rtl at the start of a function. */
4451 static void
4452 prepare_function_start (void)
4454 gcc_assert (!crtl->emit.x_last_insn);
4455 init_temp_slots ();
4456 init_emit ();
4457 init_varasm_status ();
4458 init_expr ();
4459 default_rtl_profile ();
4461 if (flag_stack_usage_info)
4463 cfun->su = ggc_alloc_cleared_stack_usage ();
4464 cfun->su->static_stack_size = -1;
4467 cse_not_expected = ! optimize;
4469 /* Caller save not needed yet. */
4470 caller_save_needed = 0;
4472 /* We haven't done register allocation yet. */
4473 reg_renumber = 0;
4475 /* Indicate that we have not instantiated virtual registers yet. */
4476 virtuals_instantiated = 0;
4478 /* Indicate that we want CONCATs now. */
4479 generating_concat_p = 1;
4481 /* Indicate we have no need of a frame pointer yet. */
4482 frame_pointer_needed = 0;
4485 /* Initialize the rtl expansion mechanism so that we can do simple things
4486 like generate sequences. This is used to provide a context during global
4487 initialization of some passes. You must call expand_dummy_function_end
4488 to exit this context. */
4490 void
4491 init_dummy_function_start (void)
4493 gcc_assert (!in_dummy_function);
4494 in_dummy_function = true;
4495 push_struct_function (NULL_TREE);
4496 prepare_function_start ();
4499 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4500 and initialize static variables for generating RTL for the statements
4501 of the function. */
4503 void
4504 init_function_start (tree subr)
4506 if (subr && DECL_STRUCT_FUNCTION (subr))
4507 set_cfun (DECL_STRUCT_FUNCTION (subr));
4508 else
4509 allocate_struct_function (subr, false);
4510 prepare_function_start ();
4511 decide_function_section (subr);
4513 /* Warn if this value is an aggregate type,
4514 regardless of which calling convention we are using for it. */
4515 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4516 warning (OPT_Waggregate_return, "function returns an aggregate");
4519 /* Make sure all values used by the optimization passes have sane defaults. */
4520 unsigned int
4521 init_function_for_compilation (void)
4523 reg_renumber = 0;
4524 return 0;
4527 struct rtl_opt_pass pass_init_function =
4530 RTL_PASS,
4531 "*init_function", /* name */
4532 NULL, /* gate */
4533 init_function_for_compilation, /* execute */
4534 NULL, /* sub */
4535 NULL, /* next */
4536 0, /* static_pass_number */
4537 TV_NONE, /* tv_id */
4538 0, /* properties_required */
4539 0, /* properties_provided */
4540 0, /* properties_destroyed */
4541 0, /* todo_flags_start */
4542 0 /* todo_flags_finish */
4547 void
4548 expand_main_function (void)
4550 #if (defined(INVOKE__main) \
4551 || (!defined(HAS_INIT_SECTION) \
4552 && !defined(INIT_SECTION_ASM_OP) \
4553 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4554 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4555 #endif
4558 /* Expand code to initialize the stack_protect_guard. This is invoked at
4559 the beginning of a function to be protected. */
4561 #ifndef HAVE_stack_protect_set
4562 # define HAVE_stack_protect_set 0
4563 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4564 #endif
4566 void
4567 stack_protect_prologue (void)
4569 tree guard_decl = targetm.stack_protect_guard ();
4570 rtx x, y;
4572 x = expand_normal (crtl->stack_protect_guard);
4573 y = expand_normal (guard_decl);
4575 /* Allow the target to copy from Y to X without leaking Y into a
4576 register. */
4577 if (HAVE_stack_protect_set)
4579 rtx insn = gen_stack_protect_set (x, y);
4580 if (insn)
4582 emit_insn (insn);
4583 return;
4587 /* Otherwise do a straight move. */
4588 emit_move_insn (x, y);
4591 /* Expand code to verify the stack_protect_guard. This is invoked at
4592 the end of a function to be protected. */
4594 #ifndef HAVE_stack_protect_test
4595 # define HAVE_stack_protect_test 0
4596 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4597 #endif
4599 void
4600 stack_protect_epilogue (void)
4602 tree guard_decl = targetm.stack_protect_guard ();
4603 rtx label = gen_label_rtx ();
4604 rtx x, y, tmp;
4606 x = expand_normal (crtl->stack_protect_guard);
4607 y = expand_normal (guard_decl);
4609 /* Allow the target to compare Y with X without leaking either into
4610 a register. */
4611 switch (HAVE_stack_protect_test != 0)
4613 case 1:
4614 tmp = gen_stack_protect_test (x, y, label);
4615 if (tmp)
4617 emit_insn (tmp);
4618 break;
4620 /* FALLTHRU */
4622 default:
4623 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4624 break;
4627 /* The noreturn predictor has been moved to the tree level. The rtl-level
4628 predictors estimate this branch about 20%, which isn't enough to get
4629 things moved out of line. Since this is the only extant case of adding
4630 a noreturn function at the rtl level, it doesn't seem worth doing anything
4631 except adding the prediction by hand. */
4632 tmp = get_last_insn ();
4633 if (JUMP_P (tmp))
4634 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4636 expand_expr_stmt (targetm.stack_protect_fail ());
4637 emit_label (label);
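/* Taken together, stack_protect_prologue and stack_protect_epilogue
   emit RTL corresponding to this source-level sketch (shown with the
   default guard and failure routines; targets may substitute their
   own through the hooks used above):

     void protected_fn (void)
     {
       long canary = __stack_chk_guard;
       ... function body ...
       if (canary != __stack_chk_guard)
         __stack_chk_fail ();
     }
*/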
4640 /* Start the RTL for a new function, and set variables used for
4641 emitting RTL.
4642 SUBR is the FUNCTION_DECL node. */
4646 void
4647 expand_function_start (tree subr)
4649 /* Make sure volatile mem refs aren't considered
4650 valid operands of arithmetic insns. */
4651 init_recog_no_volatile ();
4653 crtl->profile
4654 = (profile_flag
4655 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4657 crtl->limit_stack
4658 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4660 /* Make the label for return statements to jump to. Do not special
4661 case machines with special return instructions -- they will be
4662 handled later during jump, ifcvt, or epilogue creation. */
4663 return_label = gen_label_rtx ();
4665 /* Initialize rtx used to return the value. */
4666 /* Do this before assign_parms so that we copy the struct value address
4667 before any library calls that assign parms might generate. */
4669 /* Decide whether to return the value in memory or in a register. */
4670 if (aggregate_value_p (DECL_RESULT (subr), subr))
4672 /* Returning something that won't go in a register. */
4673 rtx value_address = 0;
4675 #ifdef PCC_STATIC_STRUCT_RETURN
4676 if (cfun->returns_pcc_struct)
4678 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4679 value_address = assemble_static_space (size);
4681 else
4682 #endif
4684 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4685 /* Expect to be passed the address of a place to store the value.
4686 If it is passed as an argument, assign_parms will take care of
4687 it. */
4688 if (sv)
4690 value_address = gen_reg_rtx (Pmode);
4691 emit_move_insn (value_address, sv);
4694 if (value_address)
4696 rtx x = value_address;
4697 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4699 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4700 set_mem_attributes (x, DECL_RESULT (subr), 1);
4702 SET_DECL_RTL (DECL_RESULT (subr), x);
4705 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4706 /* If return mode is void, this decl rtl should not be used. */
4707 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4708 else
4710 /* Compute the return value into a pseudo reg, which we will copy
4711 into the true return register after the cleanups are done. */
4712 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4713 if (TYPE_MODE (return_type) != BLKmode
4714 && targetm.calls.return_in_msb (return_type))
4715 /* expand_function_end will insert the appropriate padding in
4716 this case. Use the return value's natural (unpadded) mode
4717 within the function proper. */
4718 SET_DECL_RTL (DECL_RESULT (subr),
4719 gen_reg_rtx (TYPE_MODE (return_type)));
4720 else
4722 /* In order to figure out what mode to use for the pseudo, we
4723 figure out what the mode of the eventual return register will
4724 actually be, and use that. */
4725 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4727 /* Structures that are returned in registers are not
4728 aggregate_value_p, so we may see a PARALLEL or a REG. */
4729 if (REG_P (hard_reg))
4730 SET_DECL_RTL (DECL_RESULT (subr),
4731 gen_reg_rtx (GET_MODE (hard_reg)));
4732 else
4734 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4735 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4739 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4740 result to the real return register(s). */
4741 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4744 /* Initialize rtx for parameters and local variables.
4745 In some cases this requires emitting insns. */
4746 assign_parms (subr);
4748 /* If function gets a static chain arg, store it. */
4749 if (cfun->static_chain_decl)
4751 tree parm = cfun->static_chain_decl;
4752 rtx local, chain, insn;
4754 local = gen_reg_rtx (Pmode);
4755 chain = targetm.calls.static_chain (current_function_decl, true);
4757 set_decl_incoming_rtl (parm, chain, false);
4758 SET_DECL_RTL (parm, local);
4759 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4761 insn = emit_move_insn (local, chain);
4763 /* Mark the register as eliminable, similar to parameters. */
4764 if (MEM_P (chain)
4765 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4766 set_unique_reg_note (insn, REG_EQUIV, chain);
4769 /* If the function receives a non-local goto, then store the
4770 bits we need to restore the frame pointer. */
4771 if (cfun->nonlocal_goto_save_area)
4773 tree t_save;
4774 rtx r_save;
4776 /* ??? We need to do this save early, but unfortunately this point is
4777 before the frame variable gets declared. Help out... */
4778 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4779 if (!DECL_RTL_SET_P (var))
4780 expand_decl (var);
4782 t_save = build4 (ARRAY_REF,
4783 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4784 cfun->nonlocal_goto_save_area,
4785 integer_zero_node, NULL_TREE, NULL_TREE);
4786 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4787 gcc_assert (GET_MODE (r_save) == Pmode);
4789 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4790 update_nonlocal_goto_save_area ();
4793 /* The following was moved from init_function_start.
4794 The move is supposed to make sdb output more accurate. */
4795 /* Indicate the beginning of the function body,
4796 as opposed to parm setup. */
4797 emit_note (NOTE_INSN_FUNCTION_BEG);
4799 gcc_assert (NOTE_P (get_last_insn ()));
4801 parm_birth_insn = get_last_insn ();
4803 if (crtl->profile)
4805 #ifdef PROFILE_HOOK
4806 PROFILE_HOOK (current_function_funcdef_no);
4807 #endif
4810 /* If we are doing generic stack checking, the probe should go here. */
4811 if (flag_stack_check == GENERIC_STACK_CHECK)
4812 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4814 /* Make sure there is a line number after the function entry setup code. */
4815 force_next_line_note ();
4818 /* Undo the effects of init_dummy_function_start. */
4819 void
4820 expand_dummy_function_end (void)
4822 gcc_assert (in_dummy_function);
4824 /* End any sequences that failed to be closed due to syntax errors. */
4825 while (in_sequence_p ())
4826 end_sequence ();
4828 /* Outside function body, can't compute type's actual size
4829 until next function's body starts. */
4831 free_after_parsing (cfun);
4832 free_after_compilation (cfun);
4833 pop_cfun ();
4834 in_dummy_function = false;
4837 /* Call DOIT for each hard register used as a return value from
4838 the current function. */
4840 void
4841 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4843 rtx outgoing = crtl->return_rtx;
4845 if (! outgoing)
4846 return;
4848 if (REG_P (outgoing))
4849 (*doit) (outgoing, arg);
4850 else if (GET_CODE (outgoing) == PARALLEL)
4852 int i;
4854 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4856 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4858 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4859 (*doit) (x, arg);
4864 static void
4865 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4867 emit_clobber (reg);
4870 void
4871 clobber_return_register (void)
4873 diddle_return_value (do_clobber_return_reg, NULL);
4875 /* In case we do use a pseudo to return the value, clobber it too. */
4876 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4878 tree decl_result = DECL_RESULT (current_function_decl);
4879 rtx decl_rtl = DECL_RTL (decl_result);
4880 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4882 do_clobber_return_reg (decl_rtl, NULL);
4887 static void
4888 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4890 emit_use (reg);
4893 static void
4894 use_return_register (void)
4896 diddle_return_value (do_use_return_reg, NULL);
4899 /* Possibly warn about unused parameters. */
4900 void
4901 do_warn_unused_parameter (tree fn)
4903 tree decl;
4905 for (decl = DECL_ARGUMENTS (fn);
4906 decl; decl = DECL_CHAIN (decl))
4907 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4908 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4909 && !TREE_NO_WARNING (decl))
4910 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4913 static GTY(()) rtx initial_trampoline;
4915 /* Generate RTL for the end of the current function. */
4917 void
4918 expand_function_end (void)
4920 rtx clobber_after;
4922 /* If arg_pointer_save_area was referenced only from a nested
4923 function, we will not have initialized it yet. Do that now. */
4924 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4925 get_arg_pointer_save_area ();
4927 /* If we are doing generic stack checking and this function makes calls,
4928 do a stack probe at the start of the function to ensure we have enough
4929 space for another stack frame. */
4930 if (flag_stack_check == GENERIC_STACK_CHECK)
4932 rtx insn, seq;
4934 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4935 if (CALL_P (insn))
4937 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
4938 start_sequence ();
4939 if (STACK_CHECK_MOVING_SP)
4940 anti_adjust_stack_and_probe (max_frame_size, true);
4941 else
4942 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4943 seq = get_insns ();
4944 end_sequence ();
4945 set_insn_locators (seq, prologue_locator);
4946 emit_insn_before (seq, stack_check_probe_note);
4947 break;
4951 /* End any sequences that failed to be closed due to syntax errors. */
4952 while (in_sequence_p ())
4953 end_sequence ();
4955 clear_pending_stack_adjust ();
4956 do_pending_stack_adjust ();
4958 /* Output a line number for the end of the function.
4959 SDB depends on this. */
4960 force_next_line_note ();
4961 set_curr_insn_source_location (input_location);
4963 /* Before the return label (if any), clobber the return
4964 registers so that they are not propagated live to the rest of
4965 the function. This can only happen with functions that drop
4966 through; if there had been a return statement, there would
4967 have either been a return rtx, or a jump to the return label.
4969 We delay actual code generation until after the current_function_value_rtx
4970 is computed. */
4971 clobber_after = get_last_insn ();
4973 /* Output the label for the actual return from the function. */
4974 emit_label (return_label);
4976 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
4978 /* Let except.c know where it should emit the call to unregister
4979 the function context for sjlj exceptions. */
4980 if (flag_exceptions)
4981 sjlj_emit_function_exit_after (get_last_insn ());
4983 else
4985 /* We want to ensure that instructions that may trap are not
4986 moved into the epilogue by scheduling, because we don't
4987 always emit unwind information for the epilogue. */
4988 if (cfun->can_throw_non_call_exceptions)
4989 emit_insn (gen_blockage ());
4992 /* If this is an implementation of throw, do what's necessary to
4993 communicate between __builtin_eh_return and the epilogue. */
4994 expand_eh_return ();
4996 /* If scalar return value was computed in a pseudo-reg, or was a named
4997 return value that got dumped to the stack, copy that to the hard
4998 return register. */
4999 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5001 tree decl_result = DECL_RESULT (current_function_decl);
5002 rtx decl_rtl = DECL_RTL (decl_result);
5004 if (REG_P (decl_rtl)
5005 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5006 : DECL_REGISTER (decl_result))
5008 rtx real_decl_rtl = crtl->return_rtx;
5010 /* This should be set in assign_parms. */
5011 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5013 /* If this is a BLKmode structure being returned in registers,
5014 then use the mode computed in expand_return. Note that if
5015 decl_rtl is memory, then its mode may have been changed,
5016 but that crtl->return_rtx has not. */
5017 if (GET_MODE (real_decl_rtl) == BLKmode)
5018 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5020 /* If a non-BLKmode return value should be padded at the least
5021 significant end of the register, shift it left by the appropriate
5022 amount. BLKmode results are handled using the group load/store
5023 machinery. */
5024 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5025 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5027 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5028 REGNO (real_decl_rtl)),
5029 decl_rtl);
5030 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5032 /* If a named return value dumped decl_return to memory, then
5033 we may need to re-do the PROMOTE_MODE signed/unsigned
5034 extension. */
5035 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5037 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5038 promote_function_mode (TREE_TYPE (decl_result),
5039 GET_MODE (decl_rtl), &unsignedp,
5040 TREE_TYPE (current_function_decl), 1);
5042 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5044 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5046 /* If expand_function_start has created a PARALLEL for decl_rtl,
5047 move the result to the real return registers. Otherwise, do
5048 a group load from decl_rtl for a named return. */
5049 if (GET_CODE (decl_rtl) == PARALLEL)
5050 emit_group_move (real_decl_rtl, decl_rtl);
5051 else
5052 emit_group_load (real_decl_rtl, decl_rtl,
5053 TREE_TYPE (decl_result),
5054 int_size_in_bytes (TREE_TYPE (decl_result)));
5056 /* In the case of complex integer modes smaller than a word, we'll
5057 need to generate some non-trivial bitfield insertions. Do that
5058 on a pseudo and not the hard register. */
5059 else if (GET_CODE (decl_rtl) == CONCAT
5060 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5061 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5063 int old_generating_concat_p;
5064 rtx tmp;
5066 old_generating_concat_p = generating_concat_p;
5067 generating_concat_p = 0;
5068 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5069 generating_concat_p = old_generating_concat_p;
5071 emit_move_insn (tmp, decl_rtl);
5072 emit_move_insn (real_decl_rtl, tmp);
5074 else
5075 emit_move_insn (real_decl_rtl, decl_rtl);
5079 /* If returning a structure, arrange to return the address of the value
5080 in a place where debuggers expect to find it.
5082 If returning a structure PCC style,
5083 the caller also depends on this value.
5084 And cfun->returns_pcc_struct is not necessarily set. */
5085 if (cfun->returns_struct
5086 || cfun->returns_pcc_struct)
5088 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5089 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5090 rtx outgoing;
5092 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5093 type = TREE_TYPE (type);
5094 else
5095 value_address = XEXP (value_address, 0);
5097 outgoing = targetm.calls.function_value (build_pointer_type (type),
5098 current_function_decl, true);
5100 /* Mark this as a function return value so integrate will delete the
5101 assignment and USE below when inlining this function. */
5102 REG_FUNCTION_VALUE_P (outgoing) = 1;
5104 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5105 value_address = convert_memory_address (GET_MODE (outgoing),
5106 value_address);
5108 emit_move_insn (outgoing, value_address);
5110 /* Show return register used to hold result (in this case the address
5111 of the result). */
5112 crtl->return_rtx = outgoing;
5115 /* Emit the actual code to clobber return register. */
5117 rtx seq;
5119 start_sequence ();
5120 clobber_return_register ();
5121 seq = get_insns ();
5122 end_sequence ();
5124 emit_insn_after (seq, clobber_after);
5127 /* Output the label for the naked return from the function. */
5128 if (naked_return_label)
5129 emit_label (naked_return_label);
5131 /* @@@ This is a kludge. We want to ensure that instructions that
5132 may trap are not moved into the epilogue by scheduling, because
5133 we don't always emit unwind information for the epilogue. */
5134 if (cfun->can_throw_non_call_exceptions
5135 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5136 emit_insn (gen_blockage ());
5138 /* If stack protection is enabled for this function, check the guard. */
5139 if (crtl->stack_protect_guard)
5140 stack_protect_epilogue ();
5142 /* If we had calls to alloca, and this machine needs
5143 an accurate stack pointer to exit the function,
5144 insert some code to save and restore the stack pointer. */
5145 if (! EXIT_IGNORE_STACK
5146 && cfun->calls_alloca)
5148 rtx tem = 0, seq;
5150 start_sequence ();
5151 emit_stack_save (SAVE_FUNCTION, &tem);
5152 seq = get_insns ();
5153 end_sequence ();
5154 emit_insn_before (seq, parm_birth_insn);
5156 emit_stack_restore (SAVE_FUNCTION, tem);
5159 /* ??? This should no longer be necessary since stupid is no longer with
5160 us, but there are some parts of the compiler (e.g. reload_combine, and
5161 sh mach_dep_reorg) that still try to compute their own lifetime info
5162 instead of using the general framework. */
5163 use_return_register ();
5166 rtx
5167 get_arg_pointer_save_area (void)
5169 rtx ret = arg_pointer_save_area;
5171 if (! ret)
5173 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5174 arg_pointer_save_area = ret;
5177 if (! crtl->arg_pointer_save_area_init)
5179 rtx seq;
5181 /* Save the arg pointer at the beginning of the function. The
5182 generated stack slot may not be a valid memory address, so we
5183 have to check it and fix it if necessary. */
5184 start_sequence ();
5185 emit_move_insn (validize_mem (ret),
5186 crtl->args.internal_arg_pointer);
5187 seq = get_insns ();
5188 end_sequence ();
5190 push_topmost_sequence ();
5191 emit_insn_after (seq, entry_of_function ());
5192 pop_topmost_sequence ();
5194 crtl->arg_pointer_save_area_init = true;
5197 return ret;
5200 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5201 for the first time. */
5203 static void
5204 record_insns (rtx insns, rtx end, htab_t *hashp)
5206 rtx tmp;
5207 htab_t hash = *hashp;
5209 if (hash == NULL)
5210 *hashp = hash
5211 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5213 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5215 void **slot = htab_find_slot (hash, tmp, INSERT);
5216 gcc_assert (*slot == NULL);
5217 *slot = tmp;
5221 /* INSN has been duplicated, or replaced by COPY, perhaps by duplicating a
5222 basic block, by splitting, or by peepholes. If INSN is a prologue or epilogue
5223 insn, then record COPY as well. */
5225 void
5226 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5228 htab_t hash;
5229 void **slot;
5231 hash = epilogue_insn_hash;
5232 if (!hash || !htab_find (hash, insn))
5234 hash = prologue_insn_hash;
5235 if (!hash || !htab_find (hash, insn))
5236 return;
5239 slot = htab_find_slot (hash, copy, INSERT);
5240 gcc_assert (*slot == NULL);
5241 *slot = copy;
5244 /* Set the locator of the insn chain starting at INSN to LOC. */
5245 static void
5246 set_insn_locators (rtx insn, int loc)
5248 while (insn != NULL_RTX)
5250 if (INSN_P (insn))
5251 INSN_LOCATOR (insn) = loc;
5252 insn = NEXT_INSN (insn);
5256 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5257 we can be running after reorg, SEQUENCE rtl is possible. */
5259 static bool
5260 contains (const_rtx insn, htab_t hash)
5262 if (hash == NULL)
5263 return false;
5265 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5267 int i;
5268 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5269 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5270 return true;
5271 return false;
5274 return htab_find (hash, insn) != NULL;
5277 int
5278 prologue_epilogue_contains (const_rtx insn)
5280 if (contains (insn, prologue_insn_hash))
5281 return 1;
5282 if (contains (insn, epilogue_insn_hash))
5283 return 1;
5284 return 0;
5287 #ifdef HAVE_simple_return
5289 /* A for_each_rtx subroutine of record_hard_reg_uses. */
5290 static int
5291 record_hard_reg_uses_1 (rtx *px, void *data)
5293 rtx x = *px;
5294 HARD_REG_SET *pused = (HARD_REG_SET *)data;
5296 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5297 add_to_hard_reg_set (pused, GET_MODE (x), REGNO (x));
5298 return 0;
5301 /* Like record_hard_reg_sets, but called through note_uses. */
5302 static void
5303 record_hard_reg_uses (rtx *px, void *data)
5305 for_each_rtx (px, record_hard_reg_uses_1, data);
5308 /* Return true if INSN requires the stack frame to be set up.
5309 PROLOGUE_USED contains the hard registers used in the function
5310 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5311 prologue to set up for the function. */
5312 static bool
5313 requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
5314 HARD_REG_SET set_up_by_prologue)
5316 df_ref *df_rec;
5317 HARD_REG_SET hardregs;
5318 unsigned regno;
5320 if (!INSN_P (insn) || DEBUG_INSN_P (insn))
5321 return false;
5322 if (CALL_P (insn))
5323 return !SIBLING_CALL_P (insn);
5325 CLEAR_HARD_REG_SET (hardregs);
5326 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
5328 rtx dreg = DF_REF_REG (*df_rec);
5330 if (!REG_P (dreg))
5331 continue;
5333 add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
5334 REGNO (dreg));
5336 if (hard_reg_set_intersect_p (hardregs, prologue_used))
5337 return true;
5338 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
5339 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5340 if (TEST_HARD_REG_BIT (hardregs, regno)
5341 && df_regs_ever_live_p (regno))
5342 return true;
5344 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
5346 rtx reg = DF_REF_REG (*df_rec);
5348 if (!REG_P (reg))
5349 continue;
5351 add_to_hard_reg_set (&hardregs, GET_MODE (reg),
5352 REGNO (reg));
5354 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
5355 return true;
5357 return false;
5360 /* Look for sets of call-saved registers in the first block of the
5361 function, and move them down into successor blocks if the register
5362 is used only on one path. This exposes more opportunities for
5363 shrink-wrapping.
5364 These kinds of sets often occur when incoming argument registers are
5365 moved to call-saved registers because their values are live across
5366 one or more calls during the function. */
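/* Concretely (an x86-64-flavoured sketch): if the entry block contains
   'mov %rdi, %rbx' only so that the first argument survives a call made
   on one path, and %rbx is dead on the other path, the copy can sink
   into the call-carrying successor, leaving the entry block free of
   writes to call-saved registers.  */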
5368 static void
5369 prepare_shrink_wrap (basic_block entry_block)
5371 rtx insn, curr;
5372 FOR_BB_INSNS_SAFE (entry_block, insn, curr)
5374 basic_block next_bb;
5375 edge e, live_edge;
5376 edge_iterator ei;
5377 rtx set, scan;
5378 unsigned destreg, srcreg;
5380 if (!NONDEBUG_INSN_P (insn))
5381 continue;
5382 set = single_set (insn);
5383 if (!set)
5384 continue;
5386 if (!REG_P (SET_SRC (set)) || !REG_P (SET_DEST (set)))
5387 continue;
5388 srcreg = REGNO (SET_SRC (set));
5389 destreg = REGNO (SET_DEST (set));
5390 if (hard_regno_nregs[srcreg][GET_MODE (SET_SRC (set))] > 1
5391 || hard_regno_nregs[destreg][GET_MODE (SET_DEST (set))] > 1)
5392 continue;
5394 next_bb = entry_block;
5395 scan = insn;
5397 for (;;)
5399 live_edge = NULL;
5400 /* Try to find a single edge across which the register is live.
5401 If we find one, we'll try to move the set across this edge. */
5402 FOR_EACH_EDGE (e, ei, next_bb->succs)
5404 if (REGNO_REG_SET_P (df_get_live_in (e->dest), destreg))
5406 if (live_edge)
5408 live_edge = NULL;
5409 break;
5411 live_edge = e;
5414 if (!live_edge)
5415 break;
5416 /* We can sometimes encounter dead code. Don't try to move it
5417 into the exit block. */
5418 if (live_edge->dest == EXIT_BLOCK_PTR)
5419 break;
5420 if (EDGE_COUNT (live_edge->dest->preds) > 1)
5421 break;
5422 while (scan != BB_END (next_bb))
5424 scan = NEXT_INSN (scan);
5425 if (NONDEBUG_INSN_P (scan))
5427 rtx link;
5428 HARD_REG_SET set_regs;
5430 CLEAR_HARD_REG_SET (set_regs);
5431 note_stores (PATTERN (scan), record_hard_reg_sets,
5432 &set_regs);
5433 if (CALL_P (scan))
5434 IOR_HARD_REG_SET (set_regs, call_used_reg_set);
5435 for (link = REG_NOTES (scan); link; link = XEXP (link, 1))
5436 if (REG_NOTE_KIND (link) == REG_INC)
5437 record_hard_reg_sets (XEXP (link, 0), NULL, &set_regs);
5439 if (TEST_HARD_REG_BIT (set_regs, srcreg)
5440 || reg_referenced_p (SET_DEST (set),
5441 PATTERN (scan)))
5443 scan = NULL_RTX;
5444 break;
5446 if (CALL_P (scan))
5448 rtx link = CALL_INSN_FUNCTION_USAGE (scan);
5449 while (link)
5451 rtx tmp = XEXP (link, 0);
5452 if (GET_CODE (tmp) == USE
5453 && reg_referenced_p (SET_DEST (set), tmp))
5454 break;
5455 link = XEXP (link, 1);
5457 if (link)
5459 scan = NULL_RTX;
5460 break;
5465 if (!scan)
5466 break;
5467 next_bb = live_edge->dest;
5470 if (next_bb != entry_block)
5472 rtx after = BB_HEAD (next_bb);
5473 while (!NOTE_P (after)
5474 || NOTE_KIND (after) != NOTE_INSN_BASIC_BLOCK)
5475 after = NEXT_INSN (after);
5476 emit_insn_after (PATTERN (insn), after);
5477 delete_insn (insn);
5482 #endif
5484 #ifdef HAVE_return
5485 /* Insert use of return register before the end of BB. */
5487 static void
5488 emit_use_return_register_into_block (basic_block bb)
5490 rtx seq;
5491 start_sequence ();
5492 use_return_register ();
5493 seq = get_insns ();
5494 end_sequence ();
5495 emit_insn_before (seq, BB_END (bb));
5499 /* Create a return pattern, either simple_return or return, depending on
5500 simple_p. */
5502 static rtx
5503 gen_return_pattern (bool simple_p)
5505 #ifdef HAVE_simple_return
5506 return simple_p ? gen_simple_return () : gen_return ();
5507 #else
5508 gcc_assert (!simple_p);
5509 return gen_return ();
5510 #endif
5513 /* Insert an appropriate return pattern at the end of block BB. This
5514 also means updating block_for_insn appropriately. SIMPLE_P is
5515 the same as in gen_return_pattern and passed to it. */
5517 static void
5518 emit_return_into_block (bool simple_p, basic_block bb)
5520 rtx jump, pat;
5521 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5522 pat = PATTERN (jump);
5523 if (GET_CODE (pat) == PARALLEL)
5524 pat = XVECEXP (pat, 0, 0);
5525 gcc_assert (ANY_RETURN_P (pat));
5526 JUMP_LABEL (jump) = pat;
5528 #endif
5530 /* Set JUMP_LABEL for a return insn. */
5532 void
5533 set_return_jump_label (rtx returnjump)
5535 rtx pat = PATTERN (returnjump);
5536 if (GET_CODE (pat) == PARALLEL)
5537 pat = XVECEXP (pat, 0, 0);
5538 if (ANY_RETURN_P (pat))
5539 JUMP_LABEL (returnjump) = pat;
5540 else
5541 JUMP_LABEL (returnjump) = ret_rtx;
5544 /* Return true if BB has any active insns. */
5545 static bool
5546 bb_active_p (basic_block bb)
5548 rtx label;
5550 /* Test whether there are active instructions in BB. */
5551 label = BB_END (bb);
5552 while (label && !LABEL_P (label))
5554 if (active_insn_p (label))
5555 break;
5556 label = PREV_INSN (label);
5558 return BB_HEAD (bb) != label || !LABEL_P (label);
5561 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5562 this into place with notes indicating where the prologue ends and where
5563 the epilogue begins. Update the basic block information when possible.
5565 Notes on epilogue placement:
5566 There are several kinds of edges to the exit block:
5567 * a single fallthru edge from LAST_BB
5568 * possibly, edges from blocks containing sibcalls
5569 * possibly, fake edges from infinite loops
5571 The epilogue is always emitted on the fallthru edge from the last basic
5572 block in the function, LAST_BB, into the exit block.
5574 If LAST_BB is empty except for a label, it is the target of every
5575 other basic block in the function that ends in a return. If a
5576 target has a return or simple_return pattern (possibly with
5577 conditional variants), these basic blocks can be changed so that a
5578 return insn is emitted into them, and their target is adjusted to
5579 the real exit block.
5581 Notes on shrink wrapping: We implement a fairly conservative
5582 version of shrink-wrapping rather than the textbook one. We only
5583 generate a single prologue and a single epilogue. This is
5584 sufficient to catch a number of interesting cases involving early
5585 exits.
5587 First, we identify the blocks that require the prologue to occur before
5588 them. These are the ones that modify a call-saved register, or reference
5589 any of the stack or frame pointer registers. To simplify things, we then
5590 mark everything reachable from these blocks as also requiring a prologue.
5591 This takes care of loops automatically, and avoids the need to examine
5592 whether MEMs reference the frame, since it is sufficient to check for
5593 occurrences of the stack or frame pointer.
5595 We then compute the set of blocks for which the need for a prologue
5596 is anticipatable (borrowing terminology from the shrink-wrapping
5597 description in Muchnick's book). These are the blocks which either
5598 require a prologue themselves, or those that have only successors
5599 where the prologue is anticipatable. The prologue needs to be
5600 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5601 is not. For the moment, we ensure that only one such edge exists.
5603 The epilogue is placed as described above, but we make a
5604 distinction between inserting return and simple_return patterns
5605 when modifying other blocks that end in a return. Blocks that end
5606 in a sibcall omit the sibcall_epilogue if the block is not in
5607 ANTIC. */
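/* A function shape that benefits (G is hypothetical):

     int f (int x)
     {
       if (x == 0)
         return 0;          // needs no frame: gets a simple_return
       return g (x) + 1;    // needs the frame: prologue sunk to here
     }

   The early-exit path then runs neither the prologue nor the
   epilogue.  */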
5609 static void
5610 thread_prologue_and_epilogue_insns (void)
5612 bool inserted;
5613 basic_block last_bb;
5614 bool last_bb_active ATTRIBUTE_UNUSED;
5615 #ifdef HAVE_simple_return
5616 VEC (rtx, heap) *unconverted_simple_returns = NULL;
5617 basic_block simple_return_block_hot = NULL;
5618 basic_block simple_return_block_cold = NULL;
5619 bool nonempty_prologue;
5620 #endif
5621 rtx returnjump ATTRIBUTE_UNUSED;
5622 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5623 rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
5624 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5625 edge_iterator ei;
5626 bitmap_head bb_flags;
5628 df_analyze ();
5630 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5632 inserted = false;
5633 seq = NULL_RTX;
5634 epilogue_end = NULL_RTX;
5635 returnjump = NULL_RTX;
5637 /* Can't deal with multiple successors of the entry block at the
5638 moment. A function should always have at least one entry
5639 point. */
5640 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5641 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5642 orig_entry_edge = entry_edge;
5644 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5645 if (exit_fallthru_edge != NULL)
5647 last_bb = exit_fallthru_edge->src;
5648 last_bb_active = bb_active_p (last_bb);
5650 else
5652 last_bb = NULL;
5653 last_bb_active = false;
5656 split_prologue_seq = NULL_RTX;
5657 if (flag_split_stack
5658 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5659 == NULL))
5661 #ifndef HAVE_split_stack_prologue
5662 gcc_unreachable ();
5663 #else
5664 gcc_assert (HAVE_split_stack_prologue);
5666 start_sequence ();
5667 emit_insn (gen_split_stack_prologue ());
5668 split_prologue_seq = get_insns ();
5669 end_sequence ();
5671 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5672 set_insn_locators (split_prologue_seq, prologue_locator);
5673 #endif
5676 prologue_seq = NULL_RTX;
5677 #ifdef HAVE_prologue
5678 if (HAVE_prologue)
5680 start_sequence ();
5681 seq = gen_prologue ();
5682 emit_insn (seq);
5684 /* Insert an explicit USE for the frame pointer
5685 if profiling is on and the frame pointer is required. */
5686 if (crtl->profile && frame_pointer_needed)
5687 emit_use (hard_frame_pointer_rtx);
5689 /* Retain a map of the prologue insns. */
5690 record_insns (seq, NULL, &prologue_insn_hash);
5691 emit_note (NOTE_INSN_PROLOGUE_END);
5693 /* Ensure that instructions are not moved into the prologue when
5694 profiling is on. The call to the profiling routine can be
5695 emitted within the live range of a call-clobbered register. */
5696 if (!targetm.profile_before_prologue () && crtl->profile)
5697 emit_insn (gen_blockage ());
5699 prologue_seq = get_insns ();
5700 end_sequence ();
5701 set_insn_locators (prologue_seq, prologue_locator);
5703 #endif
5705 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5707 #ifdef HAVE_simple_return
5708 /* Try to perform a kind of shrink-wrapping, making sure the
5709 prologue/epilogue is emitted only around those parts of the
5710 function that require it. */
5712 nonempty_prologue = false;
5713 for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
5714 if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
5716 nonempty_prologue = true;
5717 break;
5720 if (flag_shrink_wrap && HAVE_simple_return
5721 && (targetm.profile_before_prologue () || !crtl->profile)
5722 && nonempty_prologue && !crtl->calls_eh_return)
5724 HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
5725 HARD_REG_SET set_up_by_prologue;
5726 rtx p_insn;
5728 VEC(basic_block, heap) *vec;
5729 basic_block bb;
5730 bitmap_head bb_antic_flags;
5731 bitmap_head bb_on_list;
5733 if (dump_file)
5734 fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");
5736 /* Compute the registers set and used in the prologue. */
5737 CLEAR_HARD_REG_SET (prologue_clobbered);
5738 CLEAR_HARD_REG_SET (prologue_used);
5739 for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
5741 HARD_REG_SET this_used;
5742 if (!NONDEBUG_INSN_P (p_insn))
5743 continue;
5745 CLEAR_HARD_REG_SET (this_used);
5746 note_uses (&PATTERN (p_insn), record_hard_reg_uses,
5747 &this_used);
5748 AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
5749 IOR_HARD_REG_SET (prologue_used, this_used);
5750 note_stores (PATTERN (p_insn), record_hard_reg_sets,
5751 &prologue_clobbered);
5754 prepare_shrink_wrap (entry_edge->dest);
5756 /* That may have inserted instructions into the last block. */
5757 if (last_bb && !last_bb_active)
5758 last_bb_active = bb_active_p (last_bb);
5760 bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
5761 bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
5763 /* Find the set of basic blocks that require a stack frame. */
5765 vec = VEC_alloc (basic_block, heap, n_basic_blocks);
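      /* Collect the registers the prologue sets up; any insn that
         references one of these can only run correctly after the
         prologue, so its block is treated as requiring a stack frame.  */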
5767 CLEAR_HARD_REG_SET (set_up_by_prologue);
5768 add_to_hard_reg_set (&set_up_by_prologue, Pmode, STACK_POINTER_REGNUM);
5769 add_to_hard_reg_set (&set_up_by_prologue, Pmode, ARG_POINTER_REGNUM);
5770 if (frame_pointer_needed)
5771 add_to_hard_reg_set (&set_up_by_prologue, Pmode,
5772 HARD_FRAME_POINTER_REGNUM);
5773 if (pic_offset_table_rtx)
5774 add_to_hard_reg_set (&set_up_by_prologue, Pmode,
5775 PIC_OFFSET_TABLE_REGNUM);
5777 FOR_EACH_BB (bb)
5779 rtx insn;
5780 /* As a special case, check for jumps to the last bb that
5781 cannot successfully be converted to simple_returns later
5782 on, and mark them as requiring a frame. These are
5783 conditional jumps that jump to their fallthru block, so
5784 it's not a case that is expected to occur often. */
5785 if (JUMP_P (BB_END (bb)) && any_condjump_p (BB_END (bb))
5786 && single_succ_p (bb)
5787 && !last_bb_active
5788 && single_succ (bb) == last_bb)
5790 bitmap_set_bit (&bb_flags, bb->index);
5791 VEC_quick_push (basic_block, vec, bb);
5793 else
5794 FOR_BB_INSNS (bb, insn)
5795 if (requires_stack_frame_p (insn, prologue_used,
5796 set_up_by_prologue))
5798 bitmap_set_bit (&bb_flags, bb->index);
5799 VEC_quick_push (basic_block, vec, bb);
5800 break;
5804 /* For every basic block that needs a prologue, mark all blocks
5805 reachable from it, so as to ensure they are also seen as
5806 requiring a prologue. */
5807 while (!VEC_empty (basic_block, vec))
5809 basic_block tmp_bb = VEC_pop (basic_block, vec);
5810 edge e;
5811 edge_iterator ei;
5812 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
5813 if (e->dest != EXIT_BLOCK_PTR
5814 && bitmap_set_bit (&bb_flags, e->dest->index))
5815 VEC_quick_push (basic_block, vec, e->dest);
5817 /* If the last basic block contains only a label, we'll be able
5818 to convert jumps to it to (potentially conditional) return
5819 insns later. This means we don't necessarily need a prologue
5820 for paths reaching it. */
5821 if (last_bb)
5823 if (!last_bb_active)
5824 bitmap_clear_bit (&bb_flags, last_bb->index);
5825 else if (!bitmap_bit_p (&bb_flags, last_bb->index))
5826 goto fail_shrinkwrap;
5829 /* Now walk backwards from every block that is marked as needing
5830 a prologue to compute the bb_antic_flags bitmap. */
5831 bitmap_copy (&bb_antic_flags, &bb_flags);
5832 FOR_EACH_BB (bb)
5834 edge e;
5835 edge_iterator ei;
5836 if (!bitmap_bit_p (&bb_flags, bb->index))
5837 continue;
5838 FOR_EACH_EDGE (e, ei, bb->preds)
5839 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
5840 && bitmap_set_bit (&bb_on_list, e->src->index))
5841 VEC_quick_push (basic_block, vec, e->src);
5843 while (!VEC_empty (basic_block, vec))
5845 basic_block tmp_bb = VEC_pop (basic_block, vec);
5846 edge e;
5847 edge_iterator ei;
5848 bool all_set = true;
5850 bitmap_clear_bit (&bb_on_list, tmp_bb->index);
5851 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
5852 if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
5854 all_set = false;
5855 break;
5858 if (all_set)
5860 bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
5861 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
5862 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
5863 && bitmap_set_bit (&bb_on_list, e->src->index))
5864 VEC_quick_push (basic_block, vec, e->src);
5867 /* Find exactly one edge that leads to a block in ANTIC from
5868 a block that isn't. */
5869 if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
5870 FOR_EACH_BB (bb)
5872 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
5873 continue;
5874 FOR_EACH_EDGE (e, ei, bb->preds)
5875 if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
5877 if (entry_edge != orig_entry_edge)
5879 entry_edge = orig_entry_edge;
5880 if (dump_file)
5881 fprintf (dump_file, "More than one candidate edge.\n");
5882 goto fail_shrinkwrap;
5884 if (dump_file)
5885 fprintf (dump_file, "Found candidate edge for "
5886 "shrink-wrapping, %d->%d.\n", e->src->index,
5887 e->dest->index);
5888 entry_edge = e;
5892 /* Test whether the prologue is known to clobber any register
5893 (other than FP or SP) that is live on the edge. */
5894 CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
5895 if (frame_pointer_needed)
5896 CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
5897 CLEAR_HARD_REG_SET (live_on_edge);
5898 reg_set_to_hard_reg_set (&live_on_edge,
5899 df_get_live_in (entry_edge->dest));
5900 if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
5902 entry_edge = orig_entry_edge;
5903 if (dump_file)
5904 fprintf (dump_file, "Shrink-wrapping aborted due to clobber.\n");
5906 else if (entry_edge != orig_entry_edge)
5908 crtl->shrink_wrapped = true;
5909 if (dump_file)
5910 fprintf (dump_file, "Performing shrink-wrapping.\n");
5913 fail_shrinkwrap:
5914 bitmap_clear (&bb_antic_flags);
5915 bitmap_clear (&bb_on_list);
5916 VEC_free (basic_block, heap, vec);
5918 #endif
5920 if (split_prologue_seq != NULL_RTX)
5922 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5923 inserted = true;
5925 if (prologue_seq != NULL_RTX)
5927 insert_insn_on_edge (prologue_seq, entry_edge);
5928 inserted = true;
5931 /* If the exit block has no non-fake predecessors, we don't need
5932 an epilogue. */
5933 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5934 if ((e->flags & EDGE_FAKE) == 0)
5935 break;
5936 if (e == NULL)
5937 goto epilogue_done;
5939 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5941 #ifdef HAVE_return
5942 /* If we're allowed to generate a simple return instruction, then by
5943 definition we don't need a full epilogue. If the last basic
5944 block before the exit block does not contain active instructions,
5945 examine its predecessors and try to emit (conditional) return
5946 instructions. */
5947 if (optimize && !last_bb_active
5948 && (HAVE_return || entry_edge != orig_entry_edge))
5950 edge_iterator ei2;
5951 int i;
5952 basic_block bb;
5953 rtx label;
5954 VEC(basic_block,heap) *src_bbs;
5956 if (exit_fallthru_edge == NULL)
5957 goto epilogue_done;
5958 label = BB_HEAD (last_bb);
5960 src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds));
5961 FOR_EACH_EDGE (e, ei2, last_bb->preds)
5962 if (e->src != ENTRY_BLOCK_PTR)
5963 VEC_quick_push (basic_block, src_bbs, e->src);
5965 FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb)
5967 bool simple_p;
5968 rtx jump;
5969 e = find_edge (bb, last_bb);
5971 jump = BB_END (bb);
5973 #ifdef HAVE_simple_return
5974 simple_p = (entry_edge != orig_entry_edge
5975 && !bitmap_bit_p (&bb_flags, bb->index));
5976 #else
5977 simple_p = false;
5978 #endif
5980 if (!simple_p
5981 && (!HAVE_return || !JUMP_P (jump)
5982 || JUMP_LABEL (jump) != label))
5983 continue;
5985 /* If we have an unconditional jump, we can replace that
5986 with a simple return instruction. */
5987 if (!JUMP_P (jump))
5989 emit_barrier_after (BB_END (bb));
5990 emit_return_into_block (simple_p, bb);
5992 else if (simplejump_p (jump))
5994 /* The use of the return register might be present in the exit
5995 fallthru block. Either:
5996 - removing the use is safe, and we should remove the use in
5997 the exit fallthru block, or
5998 - removing the use is not safe, and we should add it here.
5999 For now, we conservatively choose the latter. Either of the
6000 two helps in crossjumping. */
6001 emit_use_return_register_into_block (bb);
6003 emit_return_into_block (simple_p, bb);
6004 delete_insn (jump);
6006 else if (condjump_p (jump) && JUMP_LABEL (jump) != label)
6008 basic_block new_bb;
6009 edge new_e;
6011 gcc_assert (simple_p);
6012 new_bb = split_edge (e);
6013 emit_barrier_after (BB_END (new_bb));
6014 emit_return_into_block (simple_p, new_bb);
6015 #ifdef HAVE_simple_return
6016 if (BB_PARTITION (new_bb) == BB_HOT_PARTITION)
6017 simple_return_block_hot = new_bb;
6018 else
6019 simple_return_block_cold = new_bb;
6020 #endif
6021 new_e = single_succ_edge (new_bb);
6022 redirect_edge_succ (new_e, EXIT_BLOCK_PTR);
6024 continue;
6026 /* If we have a conditional jump branching to the last
6027 block, we can try to replace that with a conditional
6028 return instruction. */
6029 else if (condjump_p (jump))
6031 rtx dest;
6032 if (simple_p)
6033 dest = simple_return_rtx;
6034 else
6035 dest = ret_rtx;
6036 if (! redirect_jump (jump, dest, 0))
6038 #ifdef HAVE_simple_return
6039 if (simple_p)
6040 VEC_safe_push (rtx, heap,
6041 unconverted_simple_returns, jump);
6042 #endif
6043 continue;
6046 /* See comment in the simplejump_p case above. */
6047 emit_use_return_register_into_block (bb);
6049 /* If this block has only one successor, it both jumps
6050 and falls through to the fallthru block, so we can't
6051 delete the edge. */
6052 if (single_succ_p (bb))
6053 continue;
6055 else
6057 #ifdef HAVE_simple_return
6058 if (simple_p)
6059 VEC_safe_push (rtx, heap,
6060 unconverted_simple_returns, jump);
6061 #endif
6062 continue;
6065 /* Fix up the CFG for the successful change we just made. */
6066 redirect_edge_succ (e, EXIT_BLOCK_PTR);
6068 VEC_free (basic_block, heap, src_bbs);
6070 if (HAVE_return)
6072 /* Emit a return insn for the exit fallthru block. Whether
6073 this is still reachable will be determined later. */
6075 emit_barrier_after (BB_END (last_bb));
6076 emit_return_into_block (false, last_bb);
6077 epilogue_end = BB_END (last_bb);
6078 if (JUMP_P (epilogue_end))
6079 set_return_jump_label (epilogue_end);
6080 single_succ_edge (last_bb)->flags &= ~EDGE_FALLTHRU;
6081 goto epilogue_done;
6084 #endif
6086 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6087 this marker for the splits of EH_RETURN patterns, and nothing else
6088 uses the flag in the meantime. */
6089 epilogue_completed = 1;
6091 #ifdef HAVE_eh_return
6092 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6093 some targets, these get split to a special version of the epilogue
6094 code. In order to be able to properly annotate these with unwind
6095 info, try to split them now. If we get a valid split, drop an
6096 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6097 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6099 rtx prev, last, trial;
6101 if (e->flags & EDGE_FALLTHRU)
6102 continue;
6103 last = BB_END (e->src);
6104 if (!eh_returnjump_p (last))
6105 continue;
6107 prev = PREV_INSN (last);
6108 trial = try_split (PATTERN (last), last, 1);
6109 if (trial == last)
6110 continue;
6112 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6113 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6115 #endif
6117 /* If nothing falls through into the exit block, we don't need an
6118 epilogue. */
6120 if (exit_fallthru_edge == NULL)
6121 goto epilogue_done;
6123 #ifdef HAVE_epilogue
6124 if (HAVE_epilogue)
6126 start_sequence ();
6127 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6128 seq = gen_epilogue ();
6129 if (seq)
6130 emit_jump_insn (seq);
6132 /* Retain a map of the epilogue insns. */
6133 record_insns (seq, NULL, &epilogue_insn_hash);
6134 set_insn_locators (seq, epilogue_locator);
6136 seq = get_insns ();
6137 returnjump = get_last_insn ();
6138 end_sequence ();
6140 insert_insn_on_edge (seq, exit_fallthru_edge);
6141 inserted = true;
6143 if (JUMP_P (returnjump))
6144 set_return_jump_label (returnjump);
6146 else
6147 #endif
6149 basic_block cur_bb;
6151 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6152 goto epilogue_done;
6153 /* We have a fall-through edge to the exit block, the source is not
6154 at the end of the function, and there will be an assembler epilogue
6155 at the end of the function.
6156 We can't use force_nonfallthru here, because that would try to
6157 use return. Inserting a jump 'by hand' is extremely messy, so
6158 we take advantage of cfg_layout_finalize using
6159 fixup_fallthru_exit_predecessor. */
6160 cfg_layout_initialize (0);
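      /* Chain the blocks together through their aux fields so that
         cfg_layout_finalize keeps them in the current order while
         fixing up the fallthru exit predecessor.  */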
6161 FOR_EACH_BB (cur_bb)
6162 if (cur_bb->index >= NUM_FIXED_BLOCKS
6163 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6164 cur_bb->aux = cur_bb->next_bb;
6165 cfg_layout_finalize ();
6168 epilogue_done:
6170 default_rtl_profile ();
6172 if (inserted)
6174 sbitmap blocks;
6176 commit_edge_insertions ();
6178 /* Look for basic blocks within the prologue insns. */
6179 blocks = sbitmap_alloc (last_basic_block);
6180 sbitmap_zero (blocks);
6181 SET_BIT (blocks, entry_edge->dest->index);
6182 SET_BIT (blocks, orig_entry_edge->dest->index);
6183 find_many_sub_basic_blocks (blocks);
6184 sbitmap_free (blocks);
6186 /* The epilogue insns we inserted may cause the exit edge to no longer
6187 be fallthru. */
6188 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6190 if (((e->flags & EDGE_FALLTHRU) != 0)
6191 && returnjump_p (BB_END (e->src)))
6192 e->flags &= ~EDGE_FALLTHRU;
6196 #ifdef HAVE_simple_return
6197 /* If there were branches to an empty LAST_BB which we tried to
6198 convert to conditional simple_returns, but couldn't for some
6199 reason, create a block to hold a simple_return insn and redirect
6200 those remaining edges. */
6201 if (!VEC_empty (rtx, unconverted_simple_returns))
6203 basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
6204 rtx jump;
6205 int i;
6207 gcc_assert (entry_edge != orig_entry_edge);
6209 /* See if we can reuse the last insn that was emitted for the
6210 epilogue. */
6211 if (returnjump != NULL_RTX
6212 && JUMP_LABEL (returnjump) == simple_return_rtx)
6214 edge e = split_block (exit_fallthru_edge->src,
6215 PREV_INSN (returnjump));
6216 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6217 simple_return_block_hot = e->dest;
6218 else
6219 simple_return_block_cold = e->dest;
6222 FOR_EACH_VEC_ELT (rtx, unconverted_simple_returns, i, jump)
6224 basic_block src_bb = BLOCK_FOR_INSN (jump);
6225 edge e = find_edge (src_bb, last_bb);
6226 basic_block *pdest_bb;
6228 if (BB_PARTITION (src_bb) == BB_HOT_PARTITION)
6229 pdest_bb = &simple_return_block_hot;
6230 else
6231 pdest_bb = &simple_return_block_cold;
6232 if (*pdest_bb == NULL)
6234 basic_block bb;
6235 rtx start;
6237 bb = create_basic_block (NULL, NULL, exit_pred);
6238 BB_COPY_PARTITION (bb, e->src);
6239 start = emit_jump_insn_after (gen_simple_return (),
6240 BB_END (bb));
6241 JUMP_LABEL (start) = simple_return_rtx;
6242 emit_barrier_after (start);
6244 *pdest_bb = bb;
6245 make_edge (bb, EXIT_BLOCK_PTR, 0);
6247 redirect_edge_and_branch_force (e, *pdest_bb);
6249 VEC_free (rtx, heap, unconverted_simple_returns);
6251 #endif
6253 #ifdef HAVE_sibcall_epilogue
6254 /* Emit sibling epilogues before any sibling call sites. */
6255 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
6257 basic_block bb = e->src;
6258 rtx insn = BB_END (bb);
6259 rtx ep_seq;
6261 if (!CALL_P (insn)
6262 || ! SIBLING_CALL_P (insn)
6263 || (entry_edge != orig_entry_edge
6264 && !bitmap_bit_p (&bb_flags, bb->index)))
6266 ei_next (&ei);
6267 continue;
6270 ep_seq = gen_sibcall_epilogue ();
6271 if (ep_seq)
6273 start_sequence ();
6274 emit_note (NOTE_INSN_EPILOGUE_BEG);
6275 emit_insn (ep_seq);
6276 seq = get_insns ();
6277 end_sequence ();
6279 /* Retain a map of the epilogue insns. Used in life analysis to
6280 avoid getting rid of sibcall epilogue insns. Do this before we
6281 actually emit the sequence. */
6282 record_insns (seq, NULL, &epilogue_insn_hash);
6283 set_insn_locators (seq, epilogue_locator);
6285 emit_insn_before (seq, insn);
6287 ei_next (&ei);
6289 #endif
6291 #ifdef HAVE_epilogue
6292 if (epilogue_end)
6294 rtx insn, next;
6296 /* Similarly, move any line notes that appear after the epilogue.
6297 There is no need, however, to be quite so anal about the existence
6298 of such a note. Also possibly move
6299 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6300 info generation. */
6301 for (insn = epilogue_end; insn; insn = next)
6303 next = NEXT_INSN (insn);
6304 if (NOTE_P (insn)
6305 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6306 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6309 #endif
6311 bitmap_clear (&bb_flags);
6313 /* Threading the prologue and epilogue changes the artificial refs
6314 in the entry and exit blocks. */
6315 epilogue_completed = 1;
6316 df_update_entry_exit_and_calls ();
6319 /* Reposition the prologue-end and epilogue-begin notes after
6320 instruction scheduling. */
6322 void
6323 reposition_prologue_and_epilogue_notes (void)
6325 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6326 || defined (HAVE_sibcall_epilogue)
6327 /* Since the hash table is created on demand, the fact that it is
6328 non-null is a signal that it is non-empty. */
6329 if (prologue_insn_hash != NULL)
6331 size_t len = htab_elements (prologue_insn_hash);
6332 rtx insn, last = NULL, note = NULL;
6334 /* Scan from the beginning until we reach the last prologue insn. */
6335 /* ??? While we do have the CFG intact, there are two problems:
6336 (1) The prologue can contain loops (typically probing the stack),
6337 which means that the end of the prologue isn't in the first bb.
6338 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6339 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6341 if (NOTE_P (insn))
6343 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6344 note = insn;
6346 else if (contains (insn, prologue_insn_hash))
6348 last = insn;
6349 if (--len == 0)
6350 break;
6354 if (last)
6356 if (note == NULL)
6358 /* Scan forward looking for the PROLOGUE_END note. It should
6359 be right at the beginning of the block, possibly with other
6360 insn notes that got moved there. */
6361 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6363 if (NOTE_P (note)
6364 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6365 break;
6369 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6370 if (LABEL_P (last))
6371 last = NEXT_INSN (last);
6372 reorder_insns (note, note, last);
6376 if (epilogue_insn_hash != NULL)
6378 edge_iterator ei;
6379 edge e;
6381 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6383 rtx insn, first = NULL, note = NULL;
6384 basic_block bb = e->src;
6386 /* Scan from the beginning until we reach the first epilogue insn. */
6387 FOR_BB_INSNS (bb, insn)
6389 if (NOTE_P (insn))
6391 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6393 note = insn;
6394 if (first != NULL)
6395 break;
6398 else if (first == NULL && contains (insn, epilogue_insn_hash))
6400 first = insn;
6401 if (note != NULL)
6402 break;
6406 if (note)
6408 /* If the function has a single basic block, and no real
6409 epilogue insns (e.g. sibcall with no cleanup), the
6410 epilogue note can get scheduled before the prologue
6411 note. If we have frame-related prologue insns, having
6412 them scanned during the epilogue will result in a crash.
6413 In this case re-order the epilogue note to just before
6414 the last insn in the block. */
6415 if (first == NULL)
6416 first = BB_END (bb);
6418 if (PREV_INSN (first) != note)
6419 reorder_insns (note, note, PREV_INSN (first));
6423 #endif /* HAVE_prologue or HAVE_epilogue */
6426 /* Returns the name of the current function. */
6427 const char *
6428 current_function_name (void)
6430 if (cfun == NULL)
6431 return "<none>";
6432 return lang_hooks.decl_printable_name (cfun->decl, 2);
6436 static unsigned int
6437 rest_of_handle_check_leaf_regs (void)
6439 #ifdef LEAF_REGISTERS
6440 current_function_uses_only_leaf_regs
6441 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6442 #endif
6443 return 0;
6446 /* Insert a TYPE into the used types hash table of CFUN. */
6448 static void
6449 used_types_insert_helper (tree type, struct function *func)
6451 if (type != NULL && func != NULL)
6453 void **slot;
6455 if (func->used_types_hash == NULL)
6456 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
6457 htab_eq_pointer, NULL);
6458 slot = htab_find_slot (func->used_types_hash, type, INSERT);
6459 if (*slot == NULL)
6460 *slot = type;
6464 /* Given a type, insert it into the used hash table in cfun. */
6465 void
6466 used_types_insert (tree t)
6468 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6469 if (TYPE_NAME (t))
6470 break;
6471 else
6472 t = TREE_TYPE (t);
6473 if (TREE_CODE (t) == ERROR_MARK)
6474 return;
6475 if (TYPE_NAME (t) == NULL_TREE
6476 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6477 t = TYPE_MAIN_VARIANT (t);
6478 if (debug_info_level > DINFO_LEVEL_NONE)
6480 if (cfun)
6481 used_types_insert_helper (t, cfun);
6482 else
6483 /* So this might be a type referenced by a global variable.
6484 Record that type so that we can later decide to emit its debug
6485 information. */
6486 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
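/* As an illustrative example (hypothetical, not from this file), for a
   declaration such as

	struct s **a[10];

   the loop above strips the array and pointer types, so that the named
   type struct s is what gets recorded for debug-info purposes.  */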
6490 /* Helper to hash a struct types_used_by_vars_entry. */
6492 static hashval_t
6493 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6495 gcc_assert (entry && entry->var_decl && entry->type);
6497 return iterative_hash_object (entry->type,
6498 iterative_hash_object (entry->var_decl, 0));
6501 /* Hash function of the types_used_by_vars_entry hash table. */
6503 hashval_t
6504 types_used_by_vars_do_hash (const void *x)
6506 const struct types_used_by_vars_entry *entry =
6507 (const struct types_used_by_vars_entry *) x;
6509 return hash_types_used_by_vars_entry (entry);
6512 /* Equality function of the types_used_by_vars_entry hash table. */
6514 int
6515 types_used_by_vars_eq (const void *x1, const void *x2)
6517 const struct types_used_by_vars_entry *e1 =
6518 (const struct types_used_by_vars_entry *) x1;
6519 const struct types_used_by_vars_entry *e2 =
6520 (const struct types_used_by_vars_entry *)x2;
6522 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6525 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6527 void
6528 types_used_by_var_decl_insert (tree type, tree var_decl)
6530 if (type != NULL && var_decl != NULL)
6532 void **slot;
6533 struct types_used_by_vars_entry e;
6534 e.var_decl = var_decl;
6535 e.type = type;
6536 if (types_used_by_vars_hash == NULL)
6537 types_used_by_vars_hash =
6538 htab_create_ggc (37, types_used_by_vars_do_hash,
6539 types_used_by_vars_eq, NULL);
6540 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
6541 hash_types_used_by_vars_entry (&e), INSERT);
6542 if (*slot == NULL)
6544 struct types_used_by_vars_entry *entry;
6545 entry = ggc_alloc_types_used_by_vars_entry ();
6546 entry->type = type;
6547 entry->var_decl = var_decl;
6548 *slot = entry;
6553 struct rtl_opt_pass pass_leaf_regs =
6556 RTL_PASS,
6557 "*leaf_regs", /* name */
6558 NULL, /* gate */
6559 rest_of_handle_check_leaf_regs, /* execute */
6560 NULL, /* sub */
6561 NULL, /* next */
6562 0, /* static_pass_number */
6563 TV_NONE, /* tv_id */
6564 0, /* properties_required */
6565 0, /* properties_provided */
6566 0, /* properties_destroyed */
6567 0, /* todo_flags_start */
6568 0 /* todo_flags_finish */
6572 static unsigned int
6573 rest_of_handle_thread_prologue_and_epilogue (void)
6575 if (optimize)
6576 cleanup_cfg (CLEANUP_EXPENSIVE);
6578 /* On some machines, the prologue and epilogue code, or parts thereof,
6579 can be represented as RTL. Doing so lets us schedule insns between
6580 it and the rest of the code and also allows delayed branch
6581 scheduling to operate in the epilogue. */
6582 thread_prologue_and_epilogue_insns ();
6584 /* The stack usage info is finalized during prologue expansion. */
6585 if (flag_stack_usage_info)
6586 output_stack_usage ();
6588 return 0;
6591 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
6594 RTL_PASS,
6595 "pro_and_epilogue", /* name */
6596 NULL, /* gate */
6597 rest_of_handle_thread_prologue_and_epilogue, /* execute */
6598 NULL, /* sub */
6599 NULL, /* next */
6600 0, /* static_pass_number */
6601 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6602 0, /* properties_required */
6603 0, /* properties_provided */
6604 0, /* properties_destroyed */
6605 TODO_verify_flow, /* todo_flags_start */
6606 TODO_df_verify |
6607 TODO_df_finish | TODO_verify_rtl_sharing |
6608 TODO_ggc_collect /* todo_flags_finish */
6613 /* This mini-pass fixes fall-out from SSA in asm statements that have
6614 in-out constraints. Say you start with
6616 orig = inout;
6617 asm ("": "+mr" (inout));
6618 use (orig);
6620 which is transformed very early to use explicit output and match operands:
6622 orig = inout;
6623 asm ("": "=mr" (inout) : "0" (inout));
6624 use (orig);
6626 Or, after SSA and copyprop,
6628 asm ("": "=mr" (inout_2) : "0" (inout_1));
6629 use (inout_1);
6631 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6632 they represent two separate values, so they will get different pseudo
6633 registers during expansion. Then, since the two operands need to match
6634 per the constraints, but use different pseudo registers, reload can
6635 only register a reload for these operands. But reloads can only be
6636 satisfied by hardregs, not by memory, so we need a register for this
6637 reload, just because we are presented with non-matching operands.
6638 So, even though we allow memory for this operand, no memory can be
6639 used for it, just because the two operands don't match. This can
6640 cause reload failures on register-starved targets.
6642 So it's a symptom of reload not being able to use memory for reloads
6643 or, alternatively, a symptom of both operands not coming into
6644 reload as matching (in which case the pseudo could go to memory just
6645 fine, as the alternative allows it, and no reload would be necessary).
6646 We fix the latter problem here, by transforming
6648 asm ("": "=mr" (inout_2) : "0" (inout_1));
6650 back to
6652 inout_2 = inout_1;
6653 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
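/* A minimal source-level example (hypothetical) of code that exercises
   this pass:

	int
	f (int x)
	{
	  int orig = x;
	  asm ("" : "+mr" (x));
	  return orig + x;
	}

   The "+mr" constraint is split early into "=mr" plus a matching "0"
   input; once SSA and copy propagation give the two operands distinct
   pseudos, the transformation below re-links them by emitting a move
   from the input pseudo to the output pseudo ahead of the asm.  */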
6655 static void
6656 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
6658 int i;
6659 bool changed = false;
6660 rtx op = SET_SRC (p_sets[0]);
6661 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6662 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6663 bool *output_matched = XALLOCAVEC (bool, noutputs);
6665 memset (output_matched, 0, noutputs * sizeof (bool));
6666 for (i = 0; i < ninputs; i++)
6668 rtx input, output, insns;
6669 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6670 char *end;
6671 int match, j;
6673 if (*constraint == '%')
6674 constraint++;
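      /* A matching constraint is (after any '%' commutativity marker)
         just a decimal output-operand number; if no digits are consumed
         below, this input does not match an output and is skipped.  */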
6676 match = strtoul (constraint, &end, 10);
6677 if (end == constraint)
6678 continue;
6680 gcc_assert (match < noutputs);
6681 output = SET_DEST (p_sets[match]);
6682 input = RTVEC_ELT (inputs, i);
6683 /* Only do the transformation for pseudos. */
6684 if (! REG_P (output)
6685 || rtx_equal_p (output, input)
6686 || (GET_MODE (input) != VOIDmode
6687 && GET_MODE (input) != GET_MODE (output)))
6688 continue;
6690 /* We can't do anything if the output is also used as input,
6691 as we're going to overwrite it. */
6692 for (j = 0; j < ninputs; j++)
6693 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6694 break;
6695 if (j != ninputs)
6696 continue;
6698 /* Avoid changing the same input several times. For
6699 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6700 only change in once (to out1), rather than changing it
6701 first to out1 and afterwards to out2. */
6702 if (i > 0)
6704 for (j = 0; j < noutputs; j++)
6705 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6706 break;
6707 if (j != noutputs)
6708 continue;
6710 output_matched[match] = true;
6712 start_sequence ();
6713 emit_move_insn (output, input);
6714 insns = get_insns ();
6715 end_sequence ();
6716 emit_insn_before (insns, insn);
6718 /* Now replace all mentions of the input with output. We can't
6719 just replace the occurrence in inputs[i], as the register might
6720 also be used in some other input (or even in an address of an
6721 output), which would mean possibly increasing the number of
6722 inputs by one (namely 'output' in addition), which might pose
6723 too complicated a problem for reload to solve. E.g. this situation:
6725 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6727 Here 'input' is used in two occurrences as input (once for the
6728 input operand, once for the address in the second output operand).
6729 If we would replace only the occurrence of the input operand (to
6730 make the matching) we would be left with this:
6732 output = input
6733 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6735 Now we suddenly have two different input values (containing the same
6736 value, but different pseudos) where we formerly had only one.
6737 With more complicated asms this might lead to reload failures
6738 which wouldn't have happened without this pass. So, iterate over
6739 all operands and replace all occurrences of the register used. */
6740 for (j = 0; j < noutputs; j++)
6741 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6742 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6743 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6744 input, output);
6745 for (j = 0; j < ninputs; j++)
6746 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6747 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6748 input, output);
6750 changed = true;
6753 if (changed)
6754 df_insn_rescan (insn);
6757 static unsigned
6758 rest_of_match_asm_constraints (void)
6760 basic_block bb;
6761 rtx insn, pat, *p_sets;
6762 int noutputs;
6764 if (!crtl->has_asm_statement)
6765 return 0;
6767 df_set_flags (DF_DEFER_INSN_RESCAN);
6768 FOR_EACH_BB (bb)
6770 FOR_BB_INSNS (bb, insn)
6772 if (!INSN_P (insn))
6773 continue;
6775 pat = PATTERN (insn);
6776 if (GET_CODE (pat) == PARALLEL)
6777 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6778 else if (GET_CODE (pat) == SET)
6779 p_sets = &PATTERN (insn), noutputs = 1;
6780 else
6781 continue;
6783 if (GET_CODE (*p_sets) == SET
6784 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6785 match_asm_constraints_1 (insn, p_sets, noutputs);
6789 return TODO_df_finish;
6792 struct rtl_opt_pass pass_match_asm_constraints =
6795 RTL_PASS,
6796 "asmcons", /* name */
6797 NULL, /* gate */
6798 rest_of_match_asm_constraints, /* execute */
6799 NULL, /* sub */
6800 NULL, /* next */
6801 0, /* static_pass_number */
6802 TV_NONE, /* tv_id */
6803 0, /* properties_required */
6804 0, /* properties_provided */
6805 0, /* properties_destroyed */
6806 0, /* todo_flags_start */
6807 0 /* todo_flags_finish */
6812 #include "gt-function.h"