[official-gcc.git] / gcc / function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
69 /* So we can assign to cfun in this file. */
70 #undef cfun
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
74 #endif
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
86 /* Round a value down to the largest multiple of the required alignment
87 that does not exceed it. Avoid using division in case the value is
88 negative. Assume the alignment is a power of two. */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
91 /* Similar, but round to the next highest integer that meets the
92 alignment. */
93 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
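/* A worked example of the two macros above (illustrative values only):
   with ALIGN == 8,
     FLOOR_ROUND (37, 8)  == 32     CEIL_ROUND (37, 8)  == 40
     FLOOR_ROUND (-37, 8) == -40    CEIL_ROUND (-37, 8) == -32
   The masking trick gives the same floor/ceiling behavior for negative
   values, which division (truncating toward zero) would not.  */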
95 /* Nonzero if function being compiled doesn't contain any calls
96 (ignoring the prologue and epilogue). This is set prior to
97 local register allocation and is valid for the remaining
98 compiler passes. */
99 int current_function_is_leaf;
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102 (ignoring the prologue and epilogue). This is only valid after
103 pass_stack_ptr_mod has run. */
104 int current_function_sp_is_unchanging;
106 /* Nonzero if the function being compiled is a leaf function which only
107 uses leaf registers. This is valid after reload (specifically after
108 sched2) and is useful only if the port defines LEAF_REGISTERS. */
109 int current_function_uses_only_leaf_regs;
111 /* Nonzero once virtual register instantiation has been done.
112 assign_stack_local uses frame_pointer_rtx when this is nonzero.
113 calls.c:emit_library_call_value_1 uses it to set up
114 post-instantiation libcalls. */
115 int virtuals_instantiated;
117 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
118 static GTY(()) int funcdef_no;
120 /* These variables hold pointers to functions to create and destroy
121 target specific, per-function data structures. */
122 struct machine_function * (*init_machine_status) (void);
124 /* The currently compiled function. */
125 struct function *cfun = 0;
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
131 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
132 in this function. */
133 static VEC(int,heap) *sibcall_epilogue;
135 /* In order to evaluate some expressions, such as function calls returning
136 structures in memory, we need to temporarily allocate stack locations.
137 We record each allocated temporary in the following structure.
139 Associated with each temporary slot is a nesting level. When we pop up
140 one level, all temporaries associated with the previous level are freed.
141 Normally, all temporaries are freed after the execution of the statement
142 in which they were created. However, if we are inside a ({...}) grouping,
143 the result may be in a temporary and hence must be preserved. If the
144 result could be in a temporary, we preserve it if we can determine which
145 one it is in. If we cannot determine which temporary may contain the
146 result, all temporaries are preserved. A temporary is preserved by
147 pretending it was allocated at the previous nesting level.
149 Automatic variables are also assigned temporary slots, at the nesting
150 level where they are defined. They are marked as "kept" so that
151 free_temp_slots will not free them. */
153 struct temp_slot GTY(())
155 /* Points to next temporary slot. */
156 struct temp_slot *next;
157 /* Points to previous temporary slot. */
158 struct temp_slot *prev;
160 /* The rtx used to reference the slot. */
161 rtx slot;
162 /* The rtx used to represent the address if not the address of the
163 slot above. May be an EXPR_LIST if multiple addresses exist. */
164 rtx address;
165 /* The alignment (in bits) of the slot. */
166 unsigned int align;
167 /* The size, in units, of the slot. */
168 HOST_WIDE_INT size;
169 /* The type of the object in the slot, or zero if it doesn't correspond
170 to a type. We use this to determine whether a slot can be reused.
171 It can be reused if objects of the type of the new slot will always
172 conflict with objects of the type of the old slot. */
173 tree type;
174 /* Nonzero if this temporary is currently in use. */
175 char in_use;
176 /* Nonzero if this temporary has its address taken. */
177 char addr_taken;
178 /* Nesting level at which this slot is being used. */
179 int level;
180 /* Nonzero if this should survive a call to free_temp_slots. */
181 int keep;
182 /* The offset of the slot from the frame_pointer, including extra space
183 for alignment. This info is for combine_temp_slots. */
184 HOST_WIDE_INT base_offset;
185 /* The size of the slot, including extra space for alignment. This
186 info is for combine_temp_slots. */
187 HOST_WIDE_INT full_size;
190 /* Forward declarations. */
192 static struct temp_slot *find_temp_slot_from_address (rtx);
193 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
194 static void pad_below (struct args_size *, enum machine_mode, tree);
195 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
196 static int all_blocks (tree, tree *);
197 static tree *get_block_vector (tree, int *);
198 extern tree debug_find_var_in_block_tree (tree, tree);
199 /* We always define `record_insns' even if it's not used so that we
200 can always export `prologue_epilogue_contains'. */
201 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
202 static int contains (const_rtx, VEC(int,heap) **);
203 #ifdef HAVE_return
204 static void emit_return_into_block (basic_block);
205 #endif
206 static void prepare_function_start (void);
207 static void do_clobber_return_reg (rtx, void *);
208 static void do_use_return_reg (rtx, void *);
209 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
211 /* Stack of nested functions. */
212 /* Keep track of the cfun stack. */
214 typedef struct function *function_p;
216 DEF_VEC_P(function_p);
217 DEF_VEC_ALLOC_P(function_p,heap);
218 static VEC(function_p,heap) *function_context_stack;
220 /* Save the current context for compilation of a nested function.
221 This is called from language-specific code. */
223 void
224 push_function_context (void)
226 if (cfun == 0)
227 allocate_struct_function (NULL, false);
229 VEC_safe_push (function_p, heap, function_context_stack, cfun);
230 set_cfun (NULL);
233 /* Restore the last saved context, at the end of a nested function.
234 This function is called from language-specific code. */
236 void
237 pop_function_context (void)
239 struct function *p = VEC_pop (function_p, function_context_stack);
240 set_cfun (p);
241 current_function_decl = p->decl;
243 /* Reset variables that have known state during rtx generation. */
244 virtuals_instantiated = 0;
245 generating_concat_p = 1;
248 /* Clear out all parts of the state in F that can safely be discarded
249 after the function has been parsed, but not compiled, to let
250 garbage collection reclaim the memory. */
252 void
253 free_after_parsing (struct function *f)
255 f->language = 0;
258 /* Clear out all parts of the state in F that can safely be discarded
259 after the function has been compiled, to let garbage collection
260 reclaim the memory. */
262 void
263 free_after_compilation (struct function *f)
265 VEC_free (int, heap, prologue);
266 VEC_free (int, heap, epilogue);
267 VEC_free (int, heap, sibcall_epilogue);
268 if (crtl->emit.regno_pointer_align)
269 free (crtl->emit.regno_pointer_align);
271 memset (crtl, 0, sizeof (struct rtl_data));
272 f->eh = NULL;
273 f->machine = NULL;
274 f->cfg = NULL;
276 regno_reg_rtx = NULL;
277 insn_locators_free ();
280 /* Return size needed for stack frame based on slots so far allocated.
281 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
282 the caller may have to do that. */
284 HOST_WIDE_INT
285 get_frame_size (void)
287 if (FRAME_GROWS_DOWNWARD)
288 return -frame_offset;
289 else
290 return frame_offset;
293 /* Issue an error message and return TRUE if frame OFFSET overflows in
294 the signed target pointer arithmetic for function FUNC. Otherwise
295 return FALSE. */
297 bool
298 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
300 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
302 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
303 /* Leave room for the fixed part of the frame. */
304 - 64 * UNITS_PER_WORD)
306 error ("%Jtotal size of local objects too large", func);
307 return TRUE;
310 return FALSE;
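/* As a concrete (hypothetical) illustration of the check above: on a
   32-bit target with 4-byte words, any frame whose local objects exceed
   2^31 - 64*4 = 2147483392 bytes is rejected, keeping 256 bytes of
   headroom for the fixed part of the frame.  */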
313 /* Return stack slot alignment in bits for TYPE and MODE. */
315 static unsigned int
316 get_stack_local_alignment (tree type, enum machine_mode mode)
318 unsigned int alignment;
320 if (mode == BLKmode)
321 alignment = BIGGEST_ALIGNMENT;
322 else
323 alignment = GET_MODE_ALIGNMENT (mode);
325 /* Allow the front-end to (possibly) increase the alignment of this
326 stack slot. */
327 if (! type)
328 type = lang_hooks.types.type_for_mode (mode, 0);
330 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
333 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
334 with machine mode MODE.
336 ALIGN controls the amount of alignment for the address of the slot:
337 0 means according to MODE,
338 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
339 -2 means use BITS_PER_UNIT,
340 positive specifies alignment boundary in bits.
342 If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.
344 We do not round to stack_boundary here. */
347 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
348 int align,
349 bool reduce_alignment_ok ATTRIBUTE_UNUSED)
351 rtx x, addr;
352 int bigend_correction = 0;
353 unsigned int alignment, alignment_in_bits;
354 int frame_off, frame_alignment, frame_phase;
356 if (align == 0)
358 alignment = get_stack_local_alignment (NULL, mode);
359 alignment /= BITS_PER_UNIT;
361 else if (align == -1)
363 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
364 size = CEIL_ROUND (size, alignment);
366 else if (align == -2)
367 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
368 else
369 alignment = align / BITS_PER_UNIT;
371 alignment_in_bits = alignment * BITS_PER_UNIT;
373 if (FRAME_GROWS_DOWNWARD)
374 frame_offset -= size;
376 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
377 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
379 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
380 alignment = alignment_in_bits / BITS_PER_UNIT;
383 if (SUPPORTS_STACK_ALIGNMENT)
385 if (crtl->stack_alignment_estimated < alignment_in_bits)
387 if (!crtl->stack_realign_processed)
388 crtl->stack_alignment_estimated = alignment_in_bits;
389 else
391 /* If stack is realigned and stack alignment value
392 hasn't been finalized, it is OK not to increase
393 stack_alignment_estimated. The bigger alignment
394 requirement is recorded in stack_alignment_needed
395 below. */
396 gcc_assert (!crtl->stack_realign_finalized);
397 if (!crtl->stack_realign_needed)
399 /* It is OK to reduce the alignment as long as the
400 requested size is 0 or the estimated stack
401 alignment >= mode alignment. */
402 gcc_assert (reduce_alignment_ok
403 || size == 0
404 || (crtl->stack_alignment_estimated
405 >= GET_MODE_ALIGNMENT (mode)));
406 alignment_in_bits = crtl->stack_alignment_estimated;
407 alignment = alignment_in_bits / BITS_PER_UNIT;
413 if (crtl->stack_alignment_needed < alignment_in_bits)
414 crtl->stack_alignment_needed = alignment_in_bits;
415 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
416 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
418 /* Calculate how many bytes the start of local variables is off from
419 stack alignment. */
420 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
421 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
422 frame_phase = frame_off ? frame_alignment - frame_off : 0;
424 /* Round the frame offset to the specified alignment. The default is
425 to always honor requests to align the stack but a port may choose to
426 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
427 if (STACK_ALIGNMENT_NEEDED
428 || mode != BLKmode
429 || size != 0)
431 /* We must be careful here, since FRAME_OFFSET might be negative and
432 division with a negative dividend isn't as well defined as we might
433 like. So we instead assume that ALIGNMENT is a power of two and
434 use logical operations which are unambiguous. */
435 if (FRAME_GROWS_DOWNWARD)
436 frame_offset
437 = (FLOOR_ROUND (frame_offset - frame_phase,
438 (unsigned HOST_WIDE_INT) alignment)
439 + frame_phase);
440 else
441 frame_offset
442 = (CEIL_ROUND (frame_offset - frame_phase,
443 (unsigned HOST_WIDE_INT) alignment)
444 + frame_phase);
447 /* On a big-endian machine, if we are allocating more space than we will use,
448 use the least significant bytes of those that are allocated. */
449 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
450 bigend_correction = size - GET_MODE_SIZE (mode);
452 /* If we have already instantiated virtual registers, return the actual
453 address relative to the frame pointer. */
454 if (virtuals_instantiated)
455 addr = plus_constant (frame_pointer_rtx,
456 trunc_int_for_mode
457 (frame_offset + bigend_correction
458 + STARTING_FRAME_OFFSET, Pmode));
459 else
460 addr = plus_constant (virtual_stack_vars_rtx,
461 trunc_int_for_mode
462 (frame_offset + bigend_correction,
463 Pmode));
465 if (!FRAME_GROWS_DOWNWARD)
466 frame_offset += size;
468 x = gen_rtx_MEM (mode, addr);
469 set_mem_align (x, alignment_in_bits);
470 MEM_NOTRAP_P (x) = 1;
472 stack_slot_list
473 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
475 if (frame_offset_overflow (frame_offset, current_function_decl))
476 frame_offset = 0;
478 return x;
481 /* Wrap up assign_stack_local_1 with last parameter as false. */
484 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
486 return assign_stack_local_1 (mode, size, align, false);
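/* Illustrative sketch (not part of the compiler; the function and slot
   names here are hypothetical) of the ALIGN convention documented above
   assign_stack_local_1.  */
#if 0
static void
example_stack_slots (void)
{
  /* 0: alignment is taken from the mode itself.  */
  rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  /* -1: BIGGEST_ALIGNMENT, and the size is rounded up to a multiple of it.  */
  rtx b = assign_stack_local (BLKmode, 24, -1);
  /* -2: byte alignment only.  */
  rtx c = assign_stack_local (QImode, 1, -2);
  /* Positive: explicit alignment boundary in bits.  */
  rtx d = assign_stack_local (DImode, 8, 64);
}
#endif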
489 /* Removes temporary slot TEMP from LIST. */
491 static void
492 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
494 if (temp->next)
495 temp->next->prev = temp->prev;
496 if (temp->prev)
497 temp->prev->next = temp->next;
498 else
499 *list = temp->next;
501 temp->prev = temp->next = NULL;
504 /* Inserts temporary slot TEMP to LIST. */
506 static void
507 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
509 temp->next = *list;
510 if (*list)
511 (*list)->prev = temp;
512 temp->prev = NULL;
513 *list = temp;
516 /* Returns the list of used temp slots at LEVEL. */
518 static struct temp_slot **
519 temp_slots_at_level (int level)
521 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
522 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
524 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
527 /* Returns the maximal temporary slot level. */
529 static int
530 max_slot_level (void)
532 if (!used_temp_slots)
533 return -1;
535 return VEC_length (temp_slot_p, used_temp_slots) - 1;
538 /* Moves temporary slot TEMP to LEVEL. */
540 static void
541 move_slot_to_level (struct temp_slot *temp, int level)
543 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
544 insert_slot_to_list (temp, temp_slots_at_level (level));
545 temp->level = level;
548 /* Make temporary slot TEMP available. */
550 static void
551 make_slot_available (struct temp_slot *temp)
553 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
554 insert_slot_to_list (temp, &avail_temp_slots);
555 temp->in_use = 0;
556 temp->level = -1;
559 /* Allocate a temporary stack slot and record it for possible later
560 reuse.
562 MODE is the machine mode to be given to the returned rtx.
564 SIZE is the size in units of the space required. We do no rounding here
565 since assign_stack_local will do any required rounding.
567 KEEP is 1 if this slot is to be retained after a call to
568 free_temp_slots. Automatic variables for a block are allocated
569 with this flag. KEEP values of 2 or 3 were needed respectively
570 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
571 or for SAVE_EXPRs, but they are now unused.
573 TYPE is the type that will be used for the stack slot. */
576 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
577 int keep, tree type)
579 unsigned int align;
580 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
581 rtx slot;
583 /* If SIZE is -1 it means that somebody tried to allocate a temporary
584 of a variable size. */
585 gcc_assert (size != -1);
587 /* These are now unused. */
588 gcc_assert (keep <= 1);
590 align = get_stack_local_alignment (type, mode);
592 /* Try to find an available, already-allocated temporary of the proper
593 mode which meets the size and alignment requirements. Choose the
594 smallest one with the closest alignment.
596 If assign_stack_temp is called outside of the tree->rtl expansion,
597 we cannot reuse the stack slots (that may still refer to
598 VIRTUAL_STACK_VARS_REGNUM). */
599 if (!virtuals_instantiated)
601 for (p = avail_temp_slots; p; p = p->next)
603 if (p->align >= align && p->size >= size
604 && GET_MODE (p->slot) == mode
605 && objects_must_conflict_p (p->type, type)
606 && (best_p == 0 || best_p->size > p->size
607 || (best_p->size == p->size && best_p->align > p->align)))
609 if (p->align == align && p->size == size)
611 selected = p;
612 cut_slot_from_list (selected, &avail_temp_slots);
613 best_p = 0;
614 break;
616 best_p = p;
621 /* Make our best, if any, the one to use. */
622 if (best_p)
624 selected = best_p;
625 cut_slot_from_list (selected, &avail_temp_slots);
627 /* If there are enough aligned bytes left over, make them into a new
628 temp_slot so that the extra bytes don't get wasted. Do this only
629 for BLKmode slots, so that we can be sure of the alignment. */
630 if (GET_MODE (best_p->slot) == BLKmode)
632 int alignment = best_p->align / BITS_PER_UNIT;
633 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
635 if (best_p->size - rounded_size >= alignment)
637 p = GGC_NEW (struct temp_slot);
638 p->in_use = p->addr_taken = 0;
639 p->size = best_p->size - rounded_size;
640 p->base_offset = best_p->base_offset + rounded_size;
641 p->full_size = best_p->full_size - rounded_size;
642 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
643 p->align = best_p->align;
644 p->address = 0;
645 p->type = best_p->type;
646 insert_slot_to_list (p, &avail_temp_slots);
648 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
649 stack_slot_list);
651 best_p->size = rounded_size;
652 best_p->full_size = rounded_size;
657 /* If we still didn't find one, make a new temporary. */
658 if (selected == 0)
660 HOST_WIDE_INT frame_offset_old = frame_offset;
662 p = GGC_NEW (struct temp_slot);
664 /* We are passing an explicit alignment request to assign_stack_local.
665 One side effect of that is assign_stack_local will not round SIZE
666 to ensure the frame offset remains suitably aligned.
668 So for requests which depended on the rounding of SIZE, we go ahead
669 and round it now. We also make sure ALIGNMENT is at least
670 BIGGEST_ALIGNMENT. */
671 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
672 p->slot = assign_stack_local (mode,
673 (mode == BLKmode
674 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
675 : size),
676 align);
678 p->align = align;
680 /* The following slot size computation is necessary because we don't
681 know the actual size of the temporary slot until assign_stack_local
682 has performed all the frame alignment and size rounding for the
683 requested temporary. Note that extra space added for alignment
684 can be either above or below this stack slot depending on which
685 way the frame grows. We include the extra space if and only if it
686 is above this slot. */
687 if (FRAME_GROWS_DOWNWARD)
688 p->size = frame_offset_old - frame_offset;
689 else
690 p->size = size;
692 /* Now define the fields used by combine_temp_slots. */
693 if (FRAME_GROWS_DOWNWARD)
695 p->base_offset = frame_offset;
696 p->full_size = frame_offset_old - frame_offset;
698 else
700 p->base_offset = frame_offset_old;
701 p->full_size = frame_offset - frame_offset_old;
703 p->address = 0;
705 selected = p;
708 p = selected;
709 p->in_use = 1;
710 p->addr_taken = 0;
711 p->type = type;
712 p->level = temp_slot_level;
713 p->keep = keep;
715 pp = temp_slots_at_level (p->level);
716 insert_slot_to_list (p, pp);
718 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
719 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
720 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
722 /* If we know the alias set for the memory that will be used, use
723 it. If there's no TYPE, then we don't know anything about the
724 alias set for the memory. */
725 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
726 set_mem_align (slot, align);
728 /* If a type is specified, set the relevant flags. */
729 if (type != 0)
731 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
732 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
733 || TREE_CODE (type) == COMPLEX_TYPE));
735 MEM_NOTRAP_P (slot) = 1;
737 return slot;
740 /* Allocate a temporary stack slot and record it for possible later
741 reuse. First three arguments are same as in preceding function. */
744 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
746 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
749 /* Assign a temporary.
750 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
751 which should then be used in error messages. In either case, we
752 allocate space of the given type.
753 KEEP is as for assign_stack_temp.
754 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
755 it is 0 if a register is OK.
756 DONT_PROMOTE is 1 if we should not promote values in register
757 to wider modes. */
760 assign_temp (tree type_or_decl, int keep, int memory_required,
761 int dont_promote ATTRIBUTE_UNUSED)
763 tree type, decl;
764 enum machine_mode mode;
765 #ifdef PROMOTE_MODE
766 int unsignedp;
767 #endif
769 if (DECL_P (type_or_decl))
770 decl = type_or_decl, type = TREE_TYPE (decl);
771 else
772 decl = NULL, type = type_or_decl;
774 mode = TYPE_MODE (type);
775 #ifdef PROMOTE_MODE
776 unsignedp = TYPE_UNSIGNED (type);
777 #endif
779 if (mode == BLKmode || memory_required)
781 HOST_WIDE_INT size = int_size_in_bytes (type);
782 rtx tmp;
784 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
785 problems with allocating the stack space. */
786 if (size == 0)
787 size = 1;
789 /* Unfortunately, we don't yet know how to allocate variable-sized
790 temporaries. However, sometimes we can find a fixed upper limit on
791 the size, so try that instead. */
792 else if (size == -1)
793 size = max_int_size_in_bytes (type);
795 /* The size of the temporary may be too large to fit into an integer. */
796 /* ??? Not sure this should happen except for user silliness, so limit
797 this to things that aren't compiler-generated temporaries. The
798 rest of the time we'll die in assign_stack_temp_for_type. */
799 if (decl && size == -1
800 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
802 error ("size of variable %q+D is too large", decl);
803 size = 1;
806 tmp = assign_stack_temp_for_type (mode, size, keep, type);
807 return tmp;
810 #ifdef PROMOTE_MODE
811 if (! dont_promote)
812 mode = promote_mode (type, mode, &unsignedp, 0);
813 #endif
815 return gen_reg_rtx (mode);
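/* A minimal usage sketch (hypothetical caller, not compiled) of the
   temp-slot discipline the functions above implement: temporaries are
   allocated inside a push/pop pair so that free_temp_slots and
   pop_temp_slots can recycle their stack space.  */
#if 0
static void
example_temp_usage (tree type)
{
  rtx tmp;

  push_temp_slots ();
  tmp = assign_temp (type, 0, 1, 0);
  /* ... emit RTL that stores into and then reads from TMP ... */
  free_temp_slots ();
  pop_temp_slots ();
}
#endif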
818 /* Combine temporary stack slots which are adjacent on the stack.
820 This allows for better use of already allocated stack space. This is only
821 done for BLKmode slots because we can be sure that we won't have alignment
822 problems in this case. */
824 static void
825 combine_temp_slots (void)
827 struct temp_slot *p, *q, *next, *next_q;
828 int num_slots;
830 /* We can't combine slots, because the information about which slot
831 is in which alias set will be lost. */
832 if (flag_strict_aliasing)
833 return;
835 /* If there are a lot of temp slots, don't do anything unless
836 high levels of optimization are in use. */
837 if (! flag_expensive_optimizations)
838 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
839 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
840 return;
842 for (p = avail_temp_slots; p; p = next)
844 int delete_p = 0;
846 next = p->next;
848 if (GET_MODE (p->slot) != BLKmode)
849 continue;
851 for (q = p->next; q; q = next_q)
853 int delete_q = 0;
855 next_q = q->next;
857 if (GET_MODE (q->slot) != BLKmode)
858 continue;
860 if (p->base_offset + p->full_size == q->base_offset)
862 /* Q comes after P; combine Q into P. */
863 p->size += q->size;
864 p->full_size += q->full_size;
865 delete_q = 1;
867 else if (q->base_offset + q->full_size == p->base_offset)
869 /* P comes after Q; combine P into Q. */
870 q->size += p->size;
871 q->full_size += p->full_size;
872 delete_p = 1;
873 break;
875 if (delete_q)
876 cut_slot_from_list (q, &avail_temp_slots);
879 /* Either delete P or advance past it. */
880 if (delete_p)
881 cut_slot_from_list (p, &avail_temp_slots);
885 /* Find the temp slot corresponding to the object at address X. */
887 static struct temp_slot *
888 find_temp_slot_from_address (rtx x)
890 struct temp_slot *p;
891 rtx next;
892 int i;
894 for (i = max_slot_level (); i >= 0; i--)
895 for (p = *temp_slots_at_level (i); p; p = p->next)
897 if (XEXP (p->slot, 0) == x
898 || p->address == x
899 || (GET_CODE (x) == PLUS
900 && XEXP (x, 0) == virtual_stack_vars_rtx
901 && GET_CODE (XEXP (x, 1)) == CONST_INT
902 && INTVAL (XEXP (x, 1)) >= p->base_offset
903 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
904 return p;
906 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
907 for (next = p->address; next; next = XEXP (next, 1))
908 if (XEXP (next, 0) == x)
909 return p;
912 /* If we have a sum involving a register, see if it points to a temp
913 slot. */
914 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
915 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
916 return p;
917 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
918 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
919 return p;
921 return 0;
924 /* Indicate that NEW_RTX is an alternate way of referring to the temp
925 slot that previously was known by OLD_RTX. */
927 void
928 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
930 struct temp_slot *p;
932 if (rtx_equal_p (old_rtx, new_rtx))
933 return;
935 p = find_temp_slot_from_address (old_rtx);
937 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
938 NEW_RTX is a register, see if one operand of the PLUS is a
939 temporary location. If so, NEW_RTX points into it. Otherwise,
940 if both OLD_RTX and NEW_RTX are a PLUS, check whether there is a
941 register in common between them, and if so, try a recursive call
942 on those values. */
943 if (p == 0)
945 if (GET_CODE (old_rtx) != PLUS)
946 return;
948 if (REG_P (new_rtx))
950 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
951 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
952 return;
954 else if (GET_CODE (new_rtx) != PLUS)
955 return;
957 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
958 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
959 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
960 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
961 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
962 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
963 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
964 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
966 return;
969 /* Otherwise add an alias for the temp's address. */
970 else if (p->address == 0)
971 p->address = new_rtx;
972 else
974 if (GET_CODE (p->address) != EXPR_LIST)
975 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
977 p->address = gen_rtx_EXPR_LIST (VOIDmode, new_rtx, p->address);
981 /* If X could be a reference to a temporary slot, mark the fact that its
982 address was taken. */
984 void
985 mark_temp_addr_taken (rtx x)
987 struct temp_slot *p;
989 if (x == 0)
990 return;
992 /* If X is not in memory or is at a constant address, it cannot be in
993 a temporary slot. */
994 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
995 return;
997 p = find_temp_slot_from_address (XEXP (x, 0));
998 if (p != 0)
999 p->addr_taken = 1;
1002 /* If X could be a reference to a temporary slot, mark that slot as
1003 belonging to the level one higher than the current level. If X
1004 matched one of our slots, just mark that one. Otherwise, we can't
1005 easily predict which it is, so upgrade all of them. Kept slots
1006 need not be touched.
1008 This is called when an ({...}) construct occurs and a statement
1009 returns a value in memory. */
1011 void
1012 preserve_temp_slots (rtx x)
1014 struct temp_slot *p = 0, *next;
1016 /* If there is no result, we still might have some objects whose addresses
1017 were taken, so we need to make sure they stay around. */
1018 if (x == 0)
1020 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1022 next = p->next;
1024 if (p->addr_taken)
1025 move_slot_to_level (p, temp_slot_level - 1);
1028 return;
1031 /* If X is a register that is being used as a pointer, see if we have
1032 a temporary slot we know it points to. To be consistent with
1033 the code below, we really should preserve all non-kept slots
1034 if we can't find a match, but that seems to be much too costly. */
1035 if (REG_P (x) && REG_POINTER (x))
1036 p = find_temp_slot_from_address (x);
1038 /* If X is not in memory or is at a constant address, it cannot be in
1039 a temporary slot, but it can contain something whose address was
1040 taken. */
1041 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1043 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1045 next = p->next;
1047 if (p->addr_taken)
1048 move_slot_to_level (p, temp_slot_level - 1);
1051 return;
1054 /* First see if we can find a match. */
1055 if (p == 0)
1056 p = find_temp_slot_from_address (XEXP (x, 0));
1058 if (p != 0)
1060 /* Move everything at our level whose address was taken to our new
1061 level in case we used its address. */
1062 struct temp_slot *q;
1064 if (p->level == temp_slot_level)
1066 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1068 next = q->next;
1070 if (p != q && q->addr_taken)
1071 move_slot_to_level (q, temp_slot_level - 1);
1074 move_slot_to_level (p, temp_slot_level - 1);
1075 p->addr_taken = 0;
1077 return;
1080 /* Otherwise, preserve all non-kept slots at this level. */
1081 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1083 next = p->next;
1085 if (!p->keep)
1086 move_slot_to_level (p, temp_slot_level - 1);
1090 /* Free all temporaries used so far. This is normally called at the
1091 end of generating code for a statement. */
1093 void
1094 free_temp_slots (void)
1096 struct temp_slot *p, *next;
1098 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1100 next = p->next;
1102 if (!p->keep)
1103 make_slot_available (p);
1106 combine_temp_slots ();
1109 /* Push deeper into the nesting level for stack temporaries. */
1111 void
1112 push_temp_slots (void)
1114 temp_slot_level++;
1117 /* Pop a temporary nesting level. All slots in use in the current level
1118 are freed. */
1120 void
1121 pop_temp_slots (void)
1123 struct temp_slot *p, *next;
1125 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1127 next = p->next;
1128 make_slot_available (p);
1131 combine_temp_slots ();
1133 temp_slot_level--;
1136 /* Initialize temporary slots. */
1138 void
1139 init_temp_slots (void)
1141 /* We have not allocated any temporaries yet. */
1142 avail_temp_slots = 0;
1143 used_temp_slots = 0;
1144 temp_slot_level = 0;
1147 /* These routines are responsible for converting virtual register references
1148 to the actual hard register references once RTL generation is complete.
1150 The following four variables are used for communication between the
1151 routines. They contain the offsets of the virtual registers from their
1152 respective hard registers. */
1154 static int in_arg_offset;
1155 static int var_offset;
1156 static int dynamic_offset;
1157 static int out_arg_offset;
1158 static int cfa_offset;
1160 /* In most machines, the stack pointer register is equivalent to the bottom
1161 of the stack. */
1163 #ifndef STACK_POINTER_OFFSET
1164 #define STACK_POINTER_OFFSET 0
1165 #endif
1167 /* If not defined, pick an appropriate default for the offset of dynamically
1168 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1169 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1171 #ifndef STACK_DYNAMIC_OFFSET
1173 /* The bottom of the stack points to the actual arguments. If
1174 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1175 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1176 stack space for register parameters is not pushed by the caller, but
1177 rather is part of the fixed stack areas and hence not included in
1178 `crtl->outgoing_args_size'. Nevertheless, we must allow
1179 for it when allocating stack dynamic objects. */
1181 #if defined(REG_PARM_STACK_SPACE)
1182 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1183 ((ACCUMULATE_OUTGOING_ARGS \
1184 ? (crtl->outgoing_args_size \
1185 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1186 : REG_PARM_STACK_SPACE (FNDECL))) \
1187 : 0) + (STACK_POINTER_OFFSET))
1188 #else
1189 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1190 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1191 + (STACK_POINTER_OFFSET))
1192 #endif
1193 #endif
1196 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1197 is a virtual register, return the equivalent hard register and set the
1198 offset indirectly through the pointer. Otherwise, return 0. */
1200 static rtx
1201 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1203 rtx new_rtx;
1204 HOST_WIDE_INT offset;
1206 if (x == virtual_incoming_args_rtx)
1208 if (stack_realign_drap)
1210 /* Replace virtual_incoming_args_rtx with internal arg
1211 pointer if DRAP is used to realign stack. */
1212 new_rtx = crtl->args.internal_arg_pointer;
1213 offset = 0;
1215 else
1216 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1218 else if (x == virtual_stack_vars_rtx)
1219 new_rtx = frame_pointer_rtx, offset = var_offset;
1220 else if (x == virtual_stack_dynamic_rtx)
1221 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1222 else if (x == virtual_outgoing_args_rtx)
1223 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1224 else if (x == virtual_cfa_rtx)
1226 #ifdef FRAME_POINTER_CFA_OFFSET
1227 new_rtx = frame_pointer_rtx;
1228 #else
1229 new_rtx = arg_pointer_rtx;
1230 #endif
1231 offset = cfa_offset;
1233 else
1234 return NULL_RTX;
1236 *poffset = offset;
1237 return new_rtx;
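/* As an example of the mapping this function implements: a reference
   such as (plus (reg virtual-stack-vars) (const_int 8)) is rewritten by
   the callers below into frame-pointer form, typically
   (plus (reg frame-pointer) (const_int STARTING_FRAME_OFFSET + 8)),
   since var_offset is STARTING_FRAME_OFFSET.  */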
1240 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1241 Instantiate any virtual registers present inside of *LOC. The expression
1242 is simplified, as much as possible, but is not to be considered "valid"
1243 in any sense implied by the target. If any change is made, set CHANGED
1244 to true. */
1246 static int
1247 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1249 HOST_WIDE_INT offset;
1250 bool *changed = (bool *) data;
1251 rtx x, new_rtx;
1253 x = *loc;
1254 if (x == 0)
1255 return 0;
1257 switch (GET_CODE (x))
1259 case REG:
1260 new_rtx = instantiate_new_reg (x, &offset);
1261 if (new_rtx)
1263 *loc = plus_constant (new_rtx, offset);
1264 if (changed)
1265 *changed = true;
1267 return -1;
1269 case PLUS:
1270 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1271 if (new_rtx)
1273 new_rtx = plus_constant (new_rtx, offset);
1274 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1275 if (changed)
1276 *changed = true;
1277 return -1;
1280 /* FIXME -- from old code */
1281 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1282 we can commute the PLUS and SUBREG because pointers into the
1283 frame are well-behaved. */
1284 break;
1286 default:
1287 break;
1290 return 0;
1293 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1294 matches the predicate for insn CODE operand OPERAND. */
1296 static int
1297 safe_insn_predicate (int code, int operand, rtx x)
1299 const struct insn_operand_data *op_data;
1301 if (code < 0)
1302 return true;
1304 op_data = &insn_data[code].operand[operand];
1305 if (op_data->predicate == NULL)
1306 return true;
1308 return op_data->predicate (x, op_data->mode);
1311 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1312 registers present inside of insn. The result will be a valid insn. */
1314 static void
1315 instantiate_virtual_regs_in_insn (rtx insn)
1317 HOST_WIDE_INT offset;
1318 int insn_code, i;
1319 bool any_change = false;
1320 rtx set, new_rtx, x, seq;
1322 /* There are some special cases to be handled first. */
1323 set = single_set (insn);
1324 if (set)
1326 /* We're allowed to assign to a virtual register. This is interpreted
1327 to mean that the underlying register gets assigned the inverse
1328 transformation. This is used, for example, in the handling of
1329 non-local gotos. */
1330 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1331 if (new_rtx)
1333 start_sequence ();
1335 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1336 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1337 GEN_INT (-offset));
1338 x = force_operand (x, new_rtx);
1339 if (x != new_rtx)
1340 emit_move_insn (new_rtx, x);
1342 seq = get_insns ();
1343 end_sequence ();
1345 emit_insn_before (seq, insn);
1346 delete_insn (insn);
1347 return;
1350 /* Handle a straight copy from a virtual register by generating a
1351 new add insn. The difference between this and falling through
1352 to the generic case is avoiding a new pseudo and eliminating a
1353 move insn in the initial rtl stream. */
1354 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1355 if (new_rtx && offset != 0
1356 && REG_P (SET_DEST (set))
1357 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1359 start_sequence ();
1361 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1362 new_rtx, GEN_INT (offset), SET_DEST (set),
1363 1, OPTAB_LIB_WIDEN);
1364 if (x != SET_DEST (set))
1365 emit_move_insn (SET_DEST (set), x);
1367 seq = get_insns ();
1368 end_sequence ();
1370 emit_insn_before (seq, insn);
1371 delete_insn (insn);
1372 return;
1375 extract_insn (insn);
1376 insn_code = INSN_CODE (insn);
1378 /* Handle a plus involving a virtual register by determining if the
1379 operands remain valid if they're modified in place. */
1380 if (GET_CODE (SET_SRC (set)) == PLUS
1381 && recog_data.n_operands >= 3
1382 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1383 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1384 && GET_CODE (recog_data.operand[2]) == CONST_INT
1385 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1387 offset += INTVAL (recog_data.operand[2]);
1389 /* If the sum is zero, then replace with a plain move. */
1390 if (offset == 0
1391 && REG_P (SET_DEST (set))
1392 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1394 start_sequence ();
1395 emit_move_insn (SET_DEST (set), new_rtx);
1396 seq = get_insns ();
1397 end_sequence ();
1399 emit_insn_before (seq, insn);
1400 delete_insn (insn);
1401 return;
1404 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1406 /* Using validate_change and apply_change_group here leaves
1407 recog_data in an invalid state. Since we know exactly what
1408 we want to check, do those two by hand. */
1409 if (safe_insn_predicate (insn_code, 1, new_rtx)
1410 && safe_insn_predicate (insn_code, 2, x))
1412 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1413 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1414 any_change = true;
1416 /* Fall through into the regular operand fixup loop in
1417 order to take care of operands other than 1 and 2. */
1421 else
1423 extract_insn (insn);
1424 insn_code = INSN_CODE (insn);
1427 /* In the general case, we expect virtual registers to appear only in
1428 operands, and then only as either bare registers or inside memories. */
1429 for (i = 0; i < recog_data.n_operands; ++i)
1431 x = recog_data.operand[i];
1432 switch (GET_CODE (x))
1434 case MEM:
1436 rtx addr = XEXP (x, 0);
1437 bool changed = false;
1439 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1440 if (!changed)
1441 continue;
1443 start_sequence ();
1444 x = replace_equiv_address (x, addr);
1445 /* It may happen that the address with the virtual reg
1446 was valid (e.g. based on the virtual stack reg, which might
1447 be acceptable to the predicates with all offsets), whereas
1448 the address now isn't anymore, for instance when the address
1449 is still offsetted, but the base reg isn't virtual-stack-reg
1450 anymore. Below we would do a force_reg on the whole operand,
1451 but this insn might actually only accept memory. Hence,
1452 before doing that last resort, try to reload the address into
1453 a register, so this operand stays a MEM. */
1454 if (!safe_insn_predicate (insn_code, i, x))
1456 addr = force_reg (GET_MODE (addr), addr);
1457 x = replace_equiv_address (x, addr);
1459 seq = get_insns ();
1460 end_sequence ();
1461 if (seq)
1462 emit_insn_before (seq, insn);
1464 break;
1466 case REG:
1467 new_rtx = instantiate_new_reg (x, &offset);
1468 if (new_rtx == NULL)
1469 continue;
1470 if (offset == 0)
1471 x = new_rtx;
1472 else
1474 start_sequence ();
1476 /* Careful, special mode predicates may have stuff in
1477 insn_data[insn_code].operand[i].mode that isn't useful
1478 to us for computing a new value. */
1479 /* ??? Recognize address_operand and/or "p" constraints
1480 to see if (plus new offset) is a valid address before we put
1481 this through expand_simple_binop. */
1482 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1483 GEN_INT (offset), NULL_RTX,
1484 1, OPTAB_LIB_WIDEN);
1485 seq = get_insns ();
1486 end_sequence ();
1487 emit_insn_before (seq, insn);
1489 break;
1491 case SUBREG:
1492 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1493 if (new_rtx == NULL)
1494 continue;
1495 if (offset != 0)
1497 start_sequence ();
1498 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1499 GEN_INT (offset), NULL_RTX,
1500 1, OPTAB_LIB_WIDEN);
1501 seq = get_insns ();
1502 end_sequence ();
1503 emit_insn_before (seq, insn);
1505 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1506 GET_MODE (new_rtx), SUBREG_BYTE (x));
1507 gcc_assert (x);
1508 break;
1510 default:
1511 continue;
1514 /* At this point, X contains the new value for the operand.
1515 Validate the new value vs the insn predicate. Note that
1516 asm insns will have insn_code -1 here. */
1517 if (!safe_insn_predicate (insn_code, i, x))
1519 start_sequence ();
1520 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1521 seq = get_insns ();
1522 end_sequence ();
1523 if (seq)
1524 emit_insn_before (seq, insn);
1527 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1528 any_change = true;
1531 if (any_change)
1533 /* Propagate operand changes into the duplicates. */
1534 for (i = 0; i < recog_data.n_dups; ++i)
1535 *recog_data.dup_loc[i]
1536 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1538 /* Force re-recognition of the instruction for validation. */
1539 INSN_CODE (insn) = -1;
1542 if (asm_noperands (PATTERN (insn)) >= 0)
1544 if (!check_asm_operands (PATTERN (insn)))
1546 error_for_asm (insn, "impossible constraint in %<asm%>");
1547 delete_insn (insn);
1550 else
1552 if (recog_memoized (insn) < 0)
1553 fatal_insn_not_found (insn);
1557 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1558 do any instantiation required. */
1560 void
1561 instantiate_decl_rtl (rtx x)
1563 rtx addr;
1565 if (x == 0)
1566 return;
1568 /* If this is a CONCAT, recurse for the pieces. */
1569 if (GET_CODE (x) == CONCAT)
1571 instantiate_decl_rtl (XEXP (x, 0));
1572 instantiate_decl_rtl (XEXP (x, 1));
1573 return;
1576 /* If this is not a MEM, no need to do anything. Similarly if the
1577 address is a constant or a register that is not a virtual register. */
1578 if (!MEM_P (x))
1579 return;
1581 addr = XEXP (x, 0);
1582 if (CONSTANT_P (addr)
1583 || (REG_P (addr)
1584 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1585 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1586 return;
1588 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1591 /* Helper for instantiate_decls called via walk_tree: Process all decls
1592 in the given DECL_VALUE_EXPR. */
1594 static tree
1595 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1597 tree t = *tp;
1598 if (! EXPR_P (t))
1600 *walk_subtrees = 0;
1601 if (DECL_P (t) && DECL_RTL_SET_P (t))
1602 instantiate_decl_rtl (DECL_RTL (t));
1604 return NULL;
1607 /* Subroutine of instantiate_decls: Process all decls in the given
1608 BLOCK node and all its subblocks. */
1610 static void
1611 instantiate_decls_1 (tree let)
1613 tree t;
1615 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1617 if (DECL_RTL_SET_P (t))
1618 instantiate_decl_rtl (DECL_RTL (t));
1619 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1621 tree v = DECL_VALUE_EXPR (t);
1622 walk_tree (&v, instantiate_expr, NULL, NULL);
1626 /* Process all subblocks. */
1627 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1628 instantiate_decls_1 (t);
1631 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1632 all virtual registers in their DECL_RTL's. */
1634 static void
1635 instantiate_decls (tree fndecl)
1637 tree decl, t, next;
1639 /* Process all parameters of the function. */
1640 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1642 instantiate_decl_rtl (DECL_RTL (decl));
1643 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1644 if (DECL_HAS_VALUE_EXPR_P (decl))
1646 tree v = DECL_VALUE_EXPR (decl);
1647 walk_tree (&v, instantiate_expr, NULL, NULL);
1651 /* Now process all variables defined in the function or its subblocks. */
1652 instantiate_decls_1 (DECL_INITIAL (fndecl));
1654 t = cfun->local_decls;
1655 cfun->local_decls = NULL_TREE;
1656 for (; t; t = next)
1658 next = TREE_CHAIN (t);
1659 decl = TREE_VALUE (t);
1660 if (DECL_RTL_SET_P (decl))
1661 instantiate_decl_rtl (DECL_RTL (decl));
1662 ggc_free (t);
1666 /* Pass through the INSNS of function FNDECL and convert virtual register
1667 references to hard register references. */
1669 static unsigned int
1670 instantiate_virtual_regs (void)
1672 rtx insn;
1674 /* Compute the offsets to use for this function. */
1675 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1676 var_offset = STARTING_FRAME_OFFSET;
1677 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1678 out_arg_offset = STACK_POINTER_OFFSET;
1679 #ifdef FRAME_POINTER_CFA_OFFSET
1680 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1681 #else
1682 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1683 #endif
1685 /* Initialize recognition, indicating that volatile is OK. */
1686 init_recog ();
1688 /* Scan through all the insns, instantiating every virtual register still
1689 present. */
1690 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1691 if (INSN_P (insn))
1693 /* These patterns in the instruction stream can never be recognized.
1694 Fortunately, they shouldn't contain virtual registers either. */
1695 if (GET_CODE (PATTERN (insn)) == USE
1696 || GET_CODE (PATTERN (insn)) == CLOBBER
1697 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1698 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1699 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1700 continue;
1702 instantiate_virtual_regs_in_insn (insn);
1704 if (INSN_DELETED_P (insn))
1705 continue;
1707 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1709 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1710 if (GET_CODE (insn) == CALL_INSN)
1711 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1712 instantiate_virtual_regs_in_rtx, NULL);
1715 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1716 instantiate_decls (current_function_decl);
1718 targetm.instantiate_decls ();
1720 /* Indicate that, from now on, assign_stack_local should use
1721 frame_pointer_rtx. */
1722 virtuals_instantiated = 1;
1723 return 0;
1726 struct rtl_opt_pass pass_instantiate_virtual_regs =
1729 RTL_PASS,
1730 "vregs", /* name */
1731 NULL, /* gate */
1732 instantiate_virtual_regs, /* execute */
1733 NULL, /* sub */
1734 NULL, /* next */
1735 0, /* static_pass_number */
1736 0, /* tv_id */
1737 0, /* properties_required */
1738 0, /* properties_provided */
1739 0, /* properties_destroyed */
1740 0, /* todo_flags_start */
1741 TODO_dump_func /* todo_flags_finish */
1746 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1747 This means a type for which function calls must pass an address to the
1748 function or get an address back from the function.
1749 EXP may be a type node or an expression (whose type is tested). */
1752 aggregate_value_p (const_tree exp, const_tree fntype)
1754 int i, regno, nregs;
1755 rtx reg;
1757 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1759 /* DECL node associated with FNTYPE when relevant, which we might need to
1760 check for by-invisible-reference returns, typically for CALL_EXPR input
1761 EXPressions. */
1762 const_tree fndecl = NULL_TREE;
1764 if (fntype)
1765 switch (TREE_CODE (fntype))
1767 case CALL_EXPR:
1768 fndecl = get_callee_fndecl (fntype);
1769 fntype = (fndecl
1770 ? TREE_TYPE (fndecl)
1771 : TREE_TYPE (CALL_EXPR_FN (fntype)));
1772 break;
1773 case FUNCTION_DECL:
1774 fndecl = fntype;
1775 fntype = TREE_TYPE (fndecl);
1776 break;
1777 case FUNCTION_TYPE:
1778 case METHOD_TYPE:
1779 break;
1780 case IDENTIFIER_NODE:
1781 fntype = 0;
1782 break;
1783 default:
1784 /* We don't expect other tree codes here. */
1785 gcc_unreachable ();
1788 if (TREE_CODE (type) == VOID_TYPE)
1789 return 0;
1791 /* If the front end has decided that this needs to be passed by
1792 reference, do so. */
1793 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1794 && DECL_BY_REFERENCE (exp))
1795 return 1;
1797 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1798 called function RESULT_DECL, meaning the function returns in memory by
1799 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1800 on the function type, which used to be the way to request such a return
1801 mechanism but might now be causing troubles at gimplification time if
1802 temporaries with the function type need to be created. */
1803 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1804 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1805 return 1;
1807 if (targetm.calls.return_in_memory (type, fntype))
1808 return 1;
1809 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1810 and thus can't be returned in registers. */
1811 if (TREE_ADDRESSABLE (type))
1812 return 1;
1813 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1814 return 1;
1815 /* Make sure we have suitable call-clobbered regs to return
1816 the value in; if not, we must return it in memory. */
1817 reg = hard_function_value (type, 0, fntype, 0);
1819 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1820 it is OK. */
1821 if (!REG_P (reg))
1822 return 0;
1824 regno = REGNO (reg);
1825 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1826 for (i = 0; i < nregs; i++)
1827 if (! call_used_regs[regno + i])
1828 return 1;
1829 return 0;
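/* For instance, a return type with TREE_ADDRESSABLE set (e.g. a C++
   class with a nontrivial copy constructor) is always treated as an
   aggregate value here and returned in memory, as is any type the
   target's return_in_memory hook refuses to return in registers.  */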
1832 /* Return true if we should assign DECL a pseudo register; false if it
1833 should live on the local stack. */
1835 bool
1836 use_register_for_decl (const_tree decl)
1838 if (!targetm.calls.allocate_stack_slots_for_args())
1839 return true;
1841 /* Honor volatile. */
1842 if (TREE_SIDE_EFFECTS (decl))
1843 return false;
1845 /* Honor addressability. */
1846 if (TREE_ADDRESSABLE (decl))
1847 return false;
1849 /* Only register-like things go in registers. */
1850 if (DECL_MODE (decl) == BLKmode)
1851 return false;
1853 /* If -ffloat-store specified, don't put explicit float variables
1854 into registers. */
1855 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1856 propagates values across these stores, and it probably shouldn't. */
1857 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1858 return false;
1860 /* If we're not interested in tracking debugging information for
1861 this decl, then we can certainly put it in a register. */
1862 if (DECL_IGNORED_P (decl))
1863 return true;
1865 return (optimize || DECL_REGISTER (decl));
1868 /* Return true if TYPE should be passed by invisible reference. */
1870 bool
1871 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1872 tree type, bool named_arg)
1874 if (type)
1876 /* If this type contains non-trivial constructors, then it is
1877 forbidden for the middle-end to create any new copies. */
1878 if (TREE_ADDRESSABLE (type))
1879 return true;
1881 /* GCC post 3.4 passes *all* variable sized types by reference. */
1882 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1883 return true;
1886 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1889 /* Return true if TYPE, which is passed by reference, should be callee
1890 copied instead of caller copied. */
1892 bool
1893 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1894 tree type, bool named_arg)
1896 if (type && TREE_ADDRESSABLE (type))
1897 return false;
1898 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1901 /* Structures to communicate between the subroutines of assign_parms.
1902 The first holds data persistent across all parameters, the second
1903 is cleared out for each parameter. */
1905 struct assign_parm_data_all
1907 CUMULATIVE_ARGS args_so_far;
1908 struct args_size stack_args_size;
1909 tree function_result_decl;
1910 tree orig_fnargs;
1911 rtx first_conversion_insn;
1912 rtx last_conversion_insn;
1913 HOST_WIDE_INT pretend_args_size;
1914 HOST_WIDE_INT extra_pretend_bytes;
1915 int reg_parm_stack_space;
1918 struct assign_parm_data_one
1920 tree nominal_type;
1921 tree passed_type;
1922 rtx entry_parm;
1923 rtx stack_parm;
1924 enum machine_mode nominal_mode;
1925 enum machine_mode passed_mode;
1926 enum machine_mode promoted_mode;
1927 struct locate_and_pad_arg_data locate;
1928 int partial;
1929 BOOL_BITFIELD named_arg : 1;
1930 BOOL_BITFIELD passed_pointer : 1;
1931 BOOL_BITFIELD on_stack : 1;
1932 BOOL_BITFIELD loaded_in_reg : 1;
1935 /* A subroutine of assign_parms. Initialize ALL. */
1937 static void
1938 assign_parms_initialize_all (struct assign_parm_data_all *all)
1940 tree fntype;
1942 memset (all, 0, sizeof (*all));
1944 fntype = TREE_TYPE (current_function_decl);
1946 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1947 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1948 #else
1949 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1950 current_function_decl, -1);
1951 #endif
1953 #ifdef REG_PARM_STACK_SPACE
1954 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1955 #endif
1958 /* If ARGS contains entries with complex types, split the entry into two
1959    entries of the component type.  Return a new list if any substitutions
1960    were needed, else the old list.  */
1962 static tree
1963 split_complex_args (tree args)
1965 tree p;
1967 /* Before allocating memory, check for the common case of no complex. */
1968 for (p = args; p; p = TREE_CHAIN (p))
1970 tree type = TREE_TYPE (p);
1971 if (TREE_CODE (type) == COMPLEX_TYPE
1972 && targetm.calls.split_complex_arg (type))
1973 goto found;
1975 return args;
1977 found:
1978 args = copy_list (args);
1980 for (p = args; p; p = TREE_CHAIN (p))
1982 tree type = TREE_TYPE (p);
1983 if (TREE_CODE (type) == COMPLEX_TYPE
1984 && targetm.calls.split_complex_arg (type))
1986 tree decl;
1987 tree subtype = TREE_TYPE (type);
1988 bool addressable = TREE_ADDRESSABLE (p);
1990 /* Rewrite the PARM_DECL's type with its component. */
1991 TREE_TYPE (p) = subtype;
1992 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1993 DECL_MODE (p) = VOIDmode;
1994 DECL_SIZE (p) = NULL;
1995 DECL_SIZE_UNIT (p) = NULL;
1996 /* If this arg must go in memory, put it in a pseudo here.
1997 We can't allow it to go in memory as per normal parms,
1998 because the usual place might not have the imag part
1999 adjacent to the real part. */
2000 DECL_ARTIFICIAL (p) = addressable;
2001 DECL_IGNORED_P (p) = addressable;
2002 TREE_ADDRESSABLE (p) = 0;
2003 layout_decl (p, 0);
2005 /* Build a second synthetic decl. */
2006 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2007 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2008 DECL_ARTIFICIAL (decl) = addressable;
2009 DECL_IGNORED_P (decl) = addressable;
2010 layout_decl (decl, 0);
2012 /* Splice it in; skip the new decl. */
2013 TREE_CHAIN (decl) = TREE_CHAIN (p);
2014 TREE_CHAIN (p) = decl;
2015 p = decl;
2019 return args;
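/* As an example, a parameter of type _Complex double on a target whose
   split_complex_arg hook accepts it is rewritten in place into a double
   PARM_DECL for the real part, and a second synthetic double PARM_DECL for
   the imaginary part is spliced into the chain right after it.  */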
2022 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2023 the hidden struct return argument, and (abi willing) complex args.
2024 Return the new parameter list. */
2026 static tree
2027 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2029 tree fndecl = current_function_decl;
2030 tree fntype = TREE_TYPE (fndecl);
2031 tree fnargs = DECL_ARGUMENTS (fndecl);
2033 /* If struct value address is treated as the first argument, make it so. */
2034 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2035 && ! cfun->returns_pcc_struct
2036 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2038 tree type = build_pointer_type (TREE_TYPE (fntype));
2039 tree decl;
2041 decl = build_decl (PARM_DECL, NULL_TREE, type);
2042 DECL_ARG_TYPE (decl) = type;
2043 DECL_ARTIFICIAL (decl) = 1;
2044 DECL_IGNORED_P (decl) = 1;
2046 TREE_CHAIN (decl) = fnargs;
2047 fnargs = decl;
2048 all->function_result_decl = decl;
2051 all->orig_fnargs = fnargs;
2053 /* If the target wants to split complex arguments into scalars, do so. */
2054 if (targetm.calls.split_complex_arg)
2055 fnargs = split_complex_args (fnargs);
2057 return fnargs;
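/* Thus, for a function returning a large aggregate in memory on a target
   with no dedicated struct-value register, the list built here begins with
   a synthetic pointer PARM_DECL for the hidden return slot, followed by
   the user's parameters, possibly split into their complex components.  */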
2060 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2061 data for the parameter. Incorporate ABI specifics such as pass-by-
2062 reference and type promotion. */
2064 static void
2065 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2066 struct assign_parm_data_one *data)
2068 tree nominal_type, passed_type;
2069 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2071 memset (data, 0, sizeof (*data));
2073 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2074 if (!cfun->stdarg)
2075 data->named_arg = 1; /* No variadic parms. */
2076 else if (TREE_CHAIN (parm))
2077 data->named_arg = 1; /* Not the last non-variadic parm. */
2078 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2079 data->named_arg = 1; /* Only variadic ones are unnamed. */
2080 else
2081 data->named_arg = 0; /* Treat as variadic. */
2083 nominal_type = TREE_TYPE (parm);
2084 passed_type = DECL_ARG_TYPE (parm);
2086 /* Look out for errors propagating this far. Also, if the parameter's
2087 type is void then its value doesn't matter. */
2088 if (TREE_TYPE (parm) == error_mark_node
2089 /* This can happen after weird syntax errors
2090 or if an enum type is defined among the parms. */
2091 || TREE_CODE (parm) != PARM_DECL
2092 || passed_type == NULL
2093 || VOID_TYPE_P (nominal_type))
2095 nominal_type = passed_type = void_type_node;
2096 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2097 goto egress;
2100 /* Find mode of arg as it is passed, and mode of arg as it should be
2101 during execution of this function. */
2102 passed_mode = TYPE_MODE (passed_type);
2103 nominal_mode = TYPE_MODE (nominal_type);
2105 /* If the parm is to be passed as a transparent union, use the type of
2106 the first field for the tests below. We have already verified that
2107 the modes are the same. */
2108 if (TREE_CODE (passed_type) == UNION_TYPE
2109 && TYPE_TRANSPARENT_UNION (passed_type))
2110 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2112 /* See if this arg was passed by invisible reference. */
2113 if (pass_by_reference (&all->args_so_far, passed_mode,
2114 passed_type, data->named_arg))
2116 passed_type = nominal_type = build_pointer_type (passed_type);
2117 data->passed_pointer = true;
2118 passed_mode = nominal_mode = Pmode;
2121 /* Find mode as it is passed by the ABI. */
2122 promoted_mode = passed_mode;
2123 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2125 int unsignedp = TYPE_UNSIGNED (passed_type);
2126 promoted_mode = promote_mode (passed_type, promoted_mode,
2127 &unsignedp, 1);
2130 egress:
2131 data->nominal_type = nominal_type;
2132 data->passed_type = passed_type;
2133 data->nominal_mode = nominal_mode;
2134 data->passed_mode = passed_mode;
2135 data->promoted_mode = promoted_mode;
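/* As a sketch of the result: on a target that promotes function arguments,
   a "short" parameter typically ends up with nominal_mode HImode but
   promoted_mode SImode, while an argument passed by invisible reference
   has both types rewritten to pointer types and all its modes set to
   Pmode, with passed_pointer recorded for the later stages.  */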
2138 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2140 static void
2141 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2142 struct assign_parm_data_one *data, bool no_rtl)
2144 int varargs_pretend_bytes = 0;
2146 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2147 data->promoted_mode,
2148 data->passed_type,
2149 &varargs_pretend_bytes, no_rtl);
2151 /* If the back-end has requested extra stack space, record how much is
2152 needed. Do not change pretend_args_size otherwise since it may be
2153 nonzero from an earlier partial argument. */
2154 if (varargs_pretend_bytes > 0)
2155 all->pretend_args_size = varargs_pretend_bytes;
2158 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2159 the incoming location of the current parameter. */
2161 static void
2162 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2163 struct assign_parm_data_one *data)
2165 HOST_WIDE_INT pretend_bytes = 0;
2166 rtx entry_parm;
2167 bool in_regs;
2169 if (data->promoted_mode == VOIDmode)
2171 data->entry_parm = data->stack_parm = const0_rtx;
2172 return;
2175 #ifdef FUNCTION_INCOMING_ARG
2176 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2177 data->passed_type, data->named_arg);
2178 #else
2179 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2180 data->passed_type, data->named_arg);
2181 #endif
2183 if (entry_parm == 0)
2184 data->promoted_mode = data->passed_mode;
2186 /* Determine parm's home in the stack, in case it arrives in the stack
2187 or we should pretend it did. Compute the stack position and rtx where
2188 the argument arrives and its size.
2190 There is one complexity here: If this was a parameter that would
2191 have been passed in registers, but wasn't only because it is
2192 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2193 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2194 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2195 as it was the previous time. */
2196 in_regs = entry_parm != 0;
2197 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2198 in_regs = true;
2199 #endif
2200 if (!in_regs && !data->named_arg)
2202 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2204 rtx tem;
2205 #ifdef FUNCTION_INCOMING_ARG
2206 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2207 data->passed_type, true);
2208 #else
2209 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2210 data->passed_type, true);
2211 #endif
2212 in_regs = tem != NULL;
2216 /* If this parameter was passed both in registers and in the stack, use
2217 the copy on the stack. */
2218 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2219 data->passed_type))
2220 entry_parm = 0;
2222 if (entry_parm)
2224 int partial;
2226 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2227 data->promoted_mode,
2228 data->passed_type,
2229 data->named_arg);
2230 data->partial = partial;
2232 /* The caller might already have allocated stack space for the
2233 register parameters. */
2234 if (partial != 0 && all->reg_parm_stack_space == 0)
2236 /* Part of this argument is passed in registers and part
2237 is passed on the stack. Ask the prologue code to extend
2238 the stack part so that we can recreate the full value.
2240 PRETEND_BYTES is the size of the registers we need to store.
2241 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2242 stack space that the prologue should allocate.
2244 Internally, gcc assumes that the argument pointer is aligned
2245 to STACK_BOUNDARY bits. This is used both for alignment
2246 optimizations (see init_emit) and to locate arguments that are
2247 aligned to more than PARM_BOUNDARY bits. We must preserve this
2248 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2249 a stack boundary. */
2251 /* We assume at most one partial arg, and it must be the first
2252 argument on the stack. */
2253 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2255 pretend_bytes = partial;
2256 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2258 /* We want to align relative to the actual stack pointer, so
2259 don't include this in the stack size until later. */
2260 all->extra_pretend_bytes = all->pretend_args_size;
2264 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2265 entry_parm ? data->partial : 0, current_function_decl,
2266 &all->stack_args_size, &data->locate);
2268 /* Update parm_stack_boundary if this parameter is passed in the
2269 stack. */
2270 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2271 crtl->parm_stack_boundary = data->locate.boundary;
2273 /* Adjust offsets to include the pretend args. */
2274 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2275 data->locate.slot_offset.constant += pretend_bytes;
2276 data->locate.offset.constant += pretend_bytes;
2278 data->entry_parm = entry_parm;
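/* A worked example of the pretend-args rounding above: if the first stack
   argument has 12 of its bytes passed in registers and STACK_BYTES is 8,
   pretend_args_size becomes CEIL_ROUND (12, 8) == 16, and the 4 bytes
   introduced by the rounding are then added into this parameter's slot and
   argument offsets.  */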
2281 /* A subroutine of assign_parms. If there is actually space on the stack
2282 for this parm, count it in stack_args_size and return true. */
2284 static bool
2285 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2286 struct assign_parm_data_one *data)
2288 /* Trivially true if we've no incoming register. */
2289 if (data->entry_parm == NULL)
2291 /* Also true if we're partially in registers and partially not,
2292 since we've arranged to drop the entire argument on the stack. */
2293 else if (data->partial != 0)
2295 /* Also true if the target says that it's passed in both registers
2296 and on the stack. */
2297 else if (GET_CODE (data->entry_parm) == PARALLEL
2298 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2300 /* Also true if the target says that there's stack allocated for
2301 all register parameters. */
2302 else if (all->reg_parm_stack_space > 0)
2304 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2305 else
2306 return false;
2308 all->stack_args_size.constant += data->locate.size.constant;
2309 if (data->locate.size.var)
2310 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2312 return true;
2315 /* A subroutine of assign_parms. Given that this parameter is allocated
2316 stack space by the ABI, find it. */
2318 static void
2319 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2321 rtx offset_rtx, stack_parm;
2322 unsigned int align, boundary;
2324 /* If we're passing this arg using a reg, make its stack home the
2325 aligned stack slot. */
2326 if (data->entry_parm)
2327 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2328 else
2329 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2331 stack_parm = crtl->args.internal_arg_pointer;
2332 if (offset_rtx != const0_rtx)
2333 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2334 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2336 set_mem_attributes (stack_parm, parm, 1);
2337 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2338      while the promoted mode's size is needed.  */
2339 if (data->promoted_mode != BLKmode
2340 && data->promoted_mode != DECL_MODE (parm))
2342 set_mem_size (stack_parm, GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
2343 if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
2345 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2346 data->promoted_mode);
2347 if (offset)
2348 set_mem_offset (stack_parm,
2349 plus_constant (MEM_OFFSET (stack_parm), -offset));
2353 boundary = data->locate.boundary;
2354 align = BITS_PER_UNIT;
2356 /* If we're padding upward, we know that the alignment of the slot
2357 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2358 intentionally forcing upward padding. Otherwise we have to come
2359 up with a guess at the alignment based on OFFSET_RTX. */
2360 if (data->locate.where_pad != downward || data->entry_parm)
2361 align = boundary;
2362 else if (GET_CODE (offset_rtx) == CONST_INT)
2364 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2365 align = align & -align;
2367 set_mem_align (stack_parm, align);
2369 if (data->entry_parm)
2370 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2372 data->stack_parm = stack_parm;
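/* A worked example of the alignment guess above: when the alignment has to
   be derived from OFFSET_RTX, a slot offset of 4 bytes with a 64-bit
   boundary gives align = (4 * 8) | 64 == 96, and 96 & -96 == 32, so the
   stack MEM is only assumed to be 32-bit aligned.  */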
2375 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2376 always valid and contiguous. */
2378 static void
2379 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2381 rtx entry_parm = data->entry_parm;
2382 rtx stack_parm = data->stack_parm;
2384 /* If this parm was passed part in regs and part in memory, pretend it
2385 arrived entirely in memory by pushing the register-part onto the stack.
2386 In the special case of a DImode or DFmode that is split, we could put
2387 it together in a pseudoreg directly, but for now that's not worth
2388 bothering with. */
2389 if (data->partial != 0)
2391 /* Handle calls that pass values in multiple non-contiguous
2392 locations. The Irix 6 ABI has examples of this. */
2393 if (GET_CODE (entry_parm) == PARALLEL)
2394 emit_group_store (validize_mem (stack_parm), entry_parm,
2395 data->passed_type,
2396 int_size_in_bytes (data->passed_type));
2397 else
2399 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2400 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2401 data->partial / UNITS_PER_WORD);
2404 entry_parm = stack_parm;
2407 /* If we didn't decide this parm came in a register, by default it came
2408 on the stack. */
2409 else if (entry_parm == NULL)
2410 entry_parm = stack_parm;
2412 /* When an argument is passed in multiple locations, we can't make use
2413 of this information, but we can save some copying if the whole argument
2414 is passed in a single register. */
2415 else if (GET_CODE (entry_parm) == PARALLEL
2416 && data->nominal_mode != BLKmode
2417 && data->passed_mode != BLKmode)
2419 size_t i, len = XVECLEN (entry_parm, 0);
2421 for (i = 0; i < len; i++)
2422 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2423 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2424 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2425 == data->passed_mode)
2426 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2428 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2429 break;
2433 data->entry_parm = entry_parm;
2436 /* A subroutine of assign_parms. Reconstitute any values which were
2437 passed in multiple registers and would fit in a single register. */
2439 static void
2440 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2442 rtx entry_parm = data->entry_parm;
2444 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2445 This can be done with register operations rather than on the
2446 stack, even if we will store the reconstituted parameter on the
2447 stack later. */
2448 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2450 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2451 emit_group_store (parmreg, entry_parm, data->passed_type,
2452 GET_MODE_SIZE (GET_MODE (entry_parm)));
2453 entry_parm = parmreg;
2456 data->entry_parm = entry_parm;
2459 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2460 always valid and properly aligned. */
2462 static void
2463 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2465 rtx stack_parm = data->stack_parm;
2467 /* If we can't trust the parm stack slot to be aligned enough for its
2468 ultimate type, don't use that slot after entry. We'll make another
2469 stack slot, if we need one. */
2470 if (stack_parm
2471 && ((STRICT_ALIGNMENT
2472 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2473 || (data->nominal_type
2474 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2475 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2476 stack_parm = NULL;
2478 /* If parm was passed in memory, and we need to convert it on entry,
2479 don't store it back in that same slot. */
2480 else if (data->entry_parm == stack_parm
2481 && data->nominal_mode != BLKmode
2482 && data->nominal_mode != data->passed_mode)
2483 stack_parm = NULL;
2485 /* If stack protection is in effect for this function, don't leave any
2486 pointers in their passed stack slots. */
2487 else if (crtl->stack_protect_guard
2488 && (flag_stack_protect == 2
2489 || data->passed_pointer
2490 || POINTER_TYPE_P (data->nominal_type)))
2491 stack_parm = NULL;
2493 data->stack_parm = stack_parm;
2496 /* A subroutine of assign_parms. Return true if the current parameter
2497 should be stored as a BLKmode in the current frame. */
2499 static bool
2500 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2502 if (data->nominal_mode == BLKmode)
2503 return true;
2504 if (GET_MODE (data->entry_parm) == BLKmode)
2505 return true;
2507 #ifdef BLOCK_REG_PADDING
2508 /* Only assign_parm_setup_block knows how to deal with register arguments
2509 that are padded at the least significant end. */
2510 if (REG_P (data->entry_parm)
2511 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2512 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2513 == (BYTES_BIG_ENDIAN ? upward : downward)))
2514 return true;
2515 #endif
2517 return false;
2520 /* A subroutine of assign_parms. Arrange for the parameter to be
2521 present and valid in DATA->STACK_RTL. */
2523 static void
2524 assign_parm_setup_block (struct assign_parm_data_all *all,
2525 tree parm, struct assign_parm_data_one *data)
2527 rtx entry_parm = data->entry_parm;
2528 rtx stack_parm = data->stack_parm;
2529 HOST_WIDE_INT size;
2530 HOST_WIDE_INT size_stored;
2532 if (GET_CODE (entry_parm) == PARALLEL)
2533 entry_parm = emit_group_move_into_temps (entry_parm);
2535 size = int_size_in_bytes (data->passed_type);
2536 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2537 if (stack_parm == 0)
2539 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2540 stack_parm = assign_stack_local (BLKmode, size_stored,
2541 DECL_ALIGN (parm));
2542 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2543 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2544 set_mem_attributes (stack_parm, parm, 1);
2547 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2548 calls that pass values in multiple non-contiguous locations. */
2549 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2551 rtx mem;
2553 /* Note that we will be storing an integral number of words.
2554 So we have to be careful to ensure that we allocate an
2555 integral number of words. We do this above when we call
2556 assign_stack_local if space was not allocated in the argument
2557 list. If it was, this will not work if PARM_BOUNDARY is not
2558 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2559      if it becomes a problem.  The exception is when BLKmode arrives
2560 with arguments not conforming to word_mode. */
2562 if (data->stack_parm == 0)
2564 else if (GET_CODE (entry_parm) == PARALLEL)
2566 else
2567 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2569 mem = validize_mem (stack_parm);
2571 /* Handle values in multiple non-contiguous locations. */
2572 if (GET_CODE (entry_parm) == PARALLEL)
2574 push_to_sequence2 (all->first_conversion_insn,
2575 all->last_conversion_insn);
2576 emit_group_store (mem, entry_parm, data->passed_type, size);
2577 all->first_conversion_insn = get_insns ();
2578 all->last_conversion_insn = get_last_insn ();
2579 end_sequence ();
2582 else if (size == 0)
2585 /* If SIZE is that of a mode no bigger than a word, just use
2586 that mode's store operation. */
2587 else if (size <= UNITS_PER_WORD)
2589 enum machine_mode mode
2590 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2592 if (mode != BLKmode
2593 #ifdef BLOCK_REG_PADDING
2594 && (size == UNITS_PER_WORD
2595 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2596 != (BYTES_BIG_ENDIAN ? upward : downward)))
2597 #endif
2600 rtx reg;
2602 /* We are really truncating a word_mode value containing
2603 SIZE bytes into a value of mode MODE. If such an
2604 operation requires no actual instructions, we can refer
2605 to the value directly in mode MODE, otherwise we must
2606 start with the register in word_mode and explicitly
2607 convert it. */
2608 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2609 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2610 else
2612 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2613 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2615 emit_move_insn (change_address (mem, mode, 0), reg);
2618 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2619 machine must be aligned to the left before storing
2620 to memory. Note that the previous test doesn't
2621 handle all cases (e.g. SIZE == 3). */
2622 else if (size != UNITS_PER_WORD
2623 #ifdef BLOCK_REG_PADDING
2624 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2625 == downward)
2626 #else
2627 && BYTES_BIG_ENDIAN
2628 #endif
2631 rtx tem, x;
2632 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2633 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2635 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2636 build_int_cst (NULL_TREE, by),
2637 NULL_RTX, 1);
2638 tem = change_address (mem, word_mode, 0);
2639 emit_move_insn (tem, x);
2641 else
2642 move_block_from_reg (REGNO (entry_parm), mem,
2643 size_stored / UNITS_PER_WORD);
2645 else
2646 move_block_from_reg (REGNO (entry_parm), mem,
2647 size_stored / UNITS_PER_WORD);
2649 else if (data->stack_parm == 0)
2651 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2652 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2653 BLOCK_OP_NORMAL);
2654 all->first_conversion_insn = get_insns ();
2655 all->last_conversion_insn = get_last_insn ();
2656 end_sequence ();
2659 data->stack_parm = stack_parm;
2660 SET_DECL_RTL (parm, stack_parm);
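/* For instance, assuming UNITS_PER_WORD is 4, a 3-byte BLKmode argument
   arriving in a register on a big-endian target (or one whose
   BLOCK_REG_PADDING is downward) is shifted left by (4 - 3) * BITS_PER_UNIT
   bits above, so its significant bytes are left-aligned in the word before
   the word is stored into the stack slot.  */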
2663 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2664 parameter. Get it there. Perform all ABI specified conversions. */
2666 static void
2667 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2668 struct assign_parm_data_one *data)
2670 rtx parmreg;
2671 enum machine_mode promoted_nominal_mode;
2672 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2673 bool did_conversion = false;
2675 /* Store the parm in a pseudoregister during the function, but we may
2676 need to do it in a wider mode. */
2678 /* This is not really promoting for a call. However we need to be
2679 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2680 promoted_nominal_mode
2681 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2683 parmreg = gen_reg_rtx (promoted_nominal_mode);
2685 if (!DECL_ARTIFICIAL (parm))
2686 mark_user_reg (parmreg);
2688 /* If this was an item that we received a pointer to,
2689 set DECL_RTL appropriately. */
2690 if (data->passed_pointer)
2692 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2693 set_mem_attributes (x, parm, 1);
2694 SET_DECL_RTL (parm, x);
2696 else
2697 SET_DECL_RTL (parm, parmreg);
2699 assign_parm_remove_parallels (data);
2701 /* Copy the value into the register. */
2702 if (data->nominal_mode != data->passed_mode
2703 || promoted_nominal_mode != data->promoted_mode)
2705 int save_tree_used;
2707 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2708 mode, by the caller. We now have to convert it to
2709 NOMINAL_MODE, if different. However, PARMREG may be in
2710 a different mode than NOMINAL_MODE if it is being stored
2711 promoted.
2713 If ENTRY_PARM is a hard register, it might be in a register
2714 not valid for operating in its mode (e.g., an odd-numbered
2715 register for a DFmode). In that case, moves are the only
2716 thing valid, so we can't do a convert from there. This
2717      occurs when the calling sequence allows such misaligned
2718 usages.
2720 In addition, the conversion may involve a call, which could
2721 clobber parameters which haven't been copied to pseudo
2722 registers yet. Therefore, we must first copy the parm to
2723 a pseudo reg here, and save the conversion until after all
2724 parameters have been moved. */
2726 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2728 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2730 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2731 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2733 if (GET_CODE (tempreg) == SUBREG
2734 && GET_MODE (tempreg) == data->nominal_mode
2735 && REG_P (SUBREG_REG (tempreg))
2736 && data->nominal_mode == data->passed_mode
2737 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2738 && GET_MODE_SIZE (GET_MODE (tempreg))
2739 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2741 /* The argument is already sign/zero extended, so note it
2742 into the subreg. */
2743 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2744 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2747 /* TREE_USED gets set erroneously during expand_assignment. */
2748 save_tree_used = TREE_USED (parm);
2749 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2750 TREE_USED (parm) = save_tree_used;
2751 all->first_conversion_insn = get_insns ();
2752 all->last_conversion_insn = get_last_insn ();
2753 end_sequence ();
2755 did_conversion = true;
2757 else
2758 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2760 /* If we were passed a pointer but the actual value can safely live
2761 in a register, put it in one. */
2762 if (data->passed_pointer
2763 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2764 /* If by-reference argument was promoted, demote it. */
2765 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2766 || use_register_for_decl (parm)))
2768 /* We can't use nominal_mode, because it will have been set to
2769 Pmode above. We must use the actual mode of the parm. */
2770 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2771 mark_user_reg (parmreg);
2773 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2775 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2776 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2778 push_to_sequence2 (all->first_conversion_insn,
2779 all->last_conversion_insn);
2780 emit_move_insn (tempreg, DECL_RTL (parm));
2781 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2782 emit_move_insn (parmreg, tempreg);
2783 all->first_conversion_insn = get_insns ();
2784 all->last_conversion_insn = get_last_insn ();
2785 end_sequence ();
2787 did_conversion = true;
2789 else
2790 emit_move_insn (parmreg, DECL_RTL (parm));
2792 SET_DECL_RTL (parm, parmreg);
2794 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2795 now the parm. */
2796 data->stack_parm = NULL;
2799 /* Mark the register as eliminable if we did no conversion and it was
2800 copied from memory at a fixed offset, and the arg pointer was not
2801 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2802 offset formed an invalid address, such memory-equivalences as we
2803 make here would screw up life analysis for it. */
2804 if (data->nominal_mode == data->passed_mode
2805 && !did_conversion
2806 && data->stack_parm != 0
2807 && MEM_P (data->stack_parm)
2808 && data->locate.offset.var == 0
2809 && reg_mentioned_p (virtual_incoming_args_rtx,
2810 XEXP (data->stack_parm, 0)))
2812 rtx linsn = get_last_insn ();
2813 rtx sinsn, set;
2815 /* Mark complex types separately. */
2816 if (GET_CODE (parmreg) == CONCAT)
2818 enum machine_mode submode
2819 = GET_MODE_INNER (GET_MODE (parmreg));
2820 int regnor = REGNO (XEXP (parmreg, 0));
2821 int regnoi = REGNO (XEXP (parmreg, 1));
2822 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2823 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2824 GET_MODE_SIZE (submode));
2826 /* Scan backwards for the set of the real and
2827 imaginary parts. */
2828 for (sinsn = linsn; sinsn != 0;
2829 sinsn = prev_nonnote_insn (sinsn))
2831 set = single_set (sinsn);
2832 if (set == 0)
2833 continue;
2835 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2836 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2837 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2838 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2841 else if ((set = single_set (linsn)) != 0
2842 && SET_DEST (set) == parmreg)
2843 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2846   /* For a pointer data type, suggest a pointer register.  */
2847 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2848 mark_reg_pointer (parmreg,
2849 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2852 /* A subroutine of assign_parms. Allocate stack space to hold the current
2853 parameter. Get it there. Perform all ABI specified conversions. */
2855 static void
2856 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2857 struct assign_parm_data_one *data)
2859 /* Value must be stored in the stack slot STACK_PARM during function
2860 execution. */
2861 bool to_conversion = false;
2863 assign_parm_remove_parallels (data);
2865 if (data->promoted_mode != data->nominal_mode)
2867 /* Conversion is required. */
2868 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2870 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2872 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2873 to_conversion = true;
2875 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2876 TYPE_UNSIGNED (TREE_TYPE (parm)));
2878 if (data->stack_parm)
2879 /* ??? This may need a big-endian conversion on sparc64. */
2880 data->stack_parm
2881 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2884 if (data->entry_parm != data->stack_parm)
2886 rtx src, dest;
2888 if (data->stack_parm == 0)
2890 data->stack_parm
2891 = assign_stack_local (GET_MODE (data->entry_parm),
2892 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2893 TYPE_ALIGN (data->passed_type));
2894 set_mem_attributes (data->stack_parm, parm, 1);
2897 dest = validize_mem (data->stack_parm);
2898 src = validize_mem (data->entry_parm);
2900 if (MEM_P (src))
2902 /* Use a block move to handle potentially misaligned entry_parm. */
2903 if (!to_conversion)
2904 push_to_sequence2 (all->first_conversion_insn,
2905 all->last_conversion_insn);
2906 to_conversion = true;
2908 emit_block_move (dest, src,
2909 GEN_INT (int_size_in_bytes (data->passed_type)),
2910 BLOCK_OP_NORMAL);
2912 else
2913 emit_move_insn (dest, src);
2916 if (to_conversion)
2918 all->first_conversion_insn = get_insns ();
2919 all->last_conversion_insn = get_last_insn ();
2920 end_sequence ();
2923 SET_DECL_RTL (parm, data->stack_parm);
2926 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2927 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2929 static void
2930 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2932 tree parm;
2933 tree orig_fnargs = all->orig_fnargs;
2935 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2937 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2938 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2940 rtx tmp, real, imag;
2941 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2943 real = DECL_RTL (fnargs);
2944 imag = DECL_RTL (TREE_CHAIN (fnargs));
2945 if (inner != GET_MODE (real))
2947 real = gen_lowpart_SUBREG (inner, real);
2948 imag = gen_lowpart_SUBREG (inner, imag);
2951 if (TREE_ADDRESSABLE (parm))
2953 rtx rmem, imem;
2954 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2956 /* split_complex_arg put the real and imag parts in
2957 pseudos. Move them to memory. */
2958 tmp = assign_stack_local (DECL_MODE (parm), size,
2959 TYPE_ALIGN (TREE_TYPE (parm)));
2960 set_mem_attributes (tmp, parm, 1);
2961 rmem = adjust_address_nv (tmp, inner, 0);
2962 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2963 push_to_sequence2 (all->first_conversion_insn,
2964 all->last_conversion_insn);
2965 emit_move_insn (rmem, real);
2966 emit_move_insn (imem, imag);
2967 all->first_conversion_insn = get_insns ();
2968 all->last_conversion_insn = get_last_insn ();
2969 end_sequence ();
2971 else
2972 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2973 SET_DECL_RTL (parm, tmp);
2975 real = DECL_INCOMING_RTL (fnargs);
2976 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2977 if (inner != GET_MODE (real))
2979 real = gen_lowpart_SUBREG (inner, real);
2980 imag = gen_lowpart_SUBREG (inner, imag);
2982 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2983 set_decl_incoming_rtl (parm, tmp, false);
2984 fnargs = TREE_CHAIN (fnargs);
2986 else
2988 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2989 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2991 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2992 instead of the copy of decl, i.e. FNARGS. */
2993 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2994 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2997 fnargs = TREE_CHAIN (fnargs);
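/* The net effect is that a complex parameter whose halves arrived
   separately becomes visible to the rest of the compiler either as a
   CONCAT of the two component rtxes or, if it is addressable, as a stack
   slot holding the reassembled value, while DECL_INCOMING_RTL records a
   CONCAT of the original incoming locations.  */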
3001 /* Assign RTL expressions to the function's parameters. This may involve
3002 copying them into registers and using those registers as the DECL_RTL. */
3004 static void
3005 assign_parms (tree fndecl)
3007 struct assign_parm_data_all all;
3008 tree fnargs, parm;
3010 crtl->args.internal_arg_pointer
3011 = targetm.calls.internal_arg_pointer ();
3013 assign_parms_initialize_all (&all);
3014 fnargs = assign_parms_augmented_arg_list (&all);
3016 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3018 struct assign_parm_data_one data;
3020 /* Extract the type of PARM; adjust it according to ABI. */
3021 assign_parm_find_data_types (&all, parm, &data);
3023 /* Early out for errors and void parameters. */
3024 if (data.passed_mode == VOIDmode)
3026 SET_DECL_RTL (parm, const0_rtx);
3027 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3028 continue;
3031 /* Estimate stack alignment from parameter alignment. */
3032 if (SUPPORTS_STACK_ALIGNMENT)
3034 unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
3035 data.passed_type);
3036          if (TYPE_ALIGN (data.passed_type) > align)
3037            align = TYPE_ALIGN (data.passed_type);
3038 if (crtl->stack_alignment_estimated < align)
3040 gcc_assert (!crtl->stack_realign_processed);
3041 crtl->stack_alignment_estimated = align;
3045 if (cfun->stdarg && !TREE_CHAIN (parm))
3046 assign_parms_setup_varargs (&all, &data, false);
3048 /* Find out where the parameter arrives in this function. */
3049 assign_parm_find_entry_rtl (&all, &data);
3051 /* Find out where stack space for this parameter might be. */
3052 if (assign_parm_is_stack_parm (&all, &data))
3054 assign_parm_find_stack_rtl (parm, &data);
3055 assign_parm_adjust_entry_rtl (&data);
3058 /* Record permanently how this parm was passed. */
3059 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3061 /* Update info on where next arg arrives in registers. */
3062 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3063 data.passed_type, data.named_arg);
3065 assign_parm_adjust_stack_rtl (&data);
3067 if (assign_parm_setup_block_p (&data))
3068 assign_parm_setup_block (&all, parm, &data);
3069 else if (data.passed_pointer || use_register_for_decl (parm))
3070 assign_parm_setup_reg (&all, parm, &data);
3071 else
3072 assign_parm_setup_stack (&all, parm, &data);
3075 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3076 assign_parms_unsplit_complex (&all, fnargs);
3078 /* Output all parameter conversion instructions (possibly including calls)
3079 now that all parameters have been copied out of hard registers. */
3080 emit_insn (all.first_conversion_insn);
3082 /* Estimate reload stack alignment from scalar return mode. */
3083 if (SUPPORTS_STACK_ALIGNMENT)
3085 if (DECL_RESULT (fndecl))
3087 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3088 enum machine_mode mode = TYPE_MODE (type);
3090 if (mode != BLKmode
3091 && mode != VOIDmode
3092 && !AGGREGATE_TYPE_P (type))
3094 unsigned int align = GET_MODE_ALIGNMENT (mode);
3095 if (crtl->stack_alignment_estimated < align)
3097 gcc_assert (!crtl->stack_realign_processed);
3098 crtl->stack_alignment_estimated = align;
3104 /* If we are receiving a struct value address as the first argument, set up
3105 the RTL for the function result. As this might require code to convert
3106 the transmitted address to Pmode, we do this here to ensure that possible
3107 preliminary conversions of the address have been emitted already. */
3108 if (all.function_result_decl)
3110 tree result = DECL_RESULT (current_function_decl);
3111 rtx addr = DECL_RTL (all.function_result_decl);
3112 rtx x;
3114 if (DECL_BY_REFERENCE (result))
3115 x = addr;
3116 else
3118 addr = convert_memory_address (Pmode, addr);
3119 x = gen_rtx_MEM (DECL_MODE (result), addr);
3120 set_mem_attributes (x, result, 1);
3122 SET_DECL_RTL (result, x);
3125 /* We have aligned all the args, so add space for the pretend args. */
3126 crtl->args.pretend_args_size = all.pretend_args_size;
3127 all.stack_args_size.constant += all.extra_pretend_bytes;
3128 crtl->args.size = all.stack_args_size.constant;
3130 /* Adjust function incoming argument size for alignment and
3131 minimum length. */
3133 #ifdef REG_PARM_STACK_SPACE
3134 crtl->args.size = MAX (crtl->args.size,
3135 REG_PARM_STACK_SPACE (fndecl));
3136 #endif
3138 crtl->args.size = CEIL_ROUND (crtl->args.size,
3139 PARM_BOUNDARY / BITS_PER_UNIT);
3141 #ifdef ARGS_GROW_DOWNWARD
3142 crtl->args.arg_offset_rtx
3143 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3144 : expand_expr (size_diffop (all.stack_args_size.var,
3145 size_int (-all.stack_args_size.constant)),
3146 NULL_RTX, VOIDmode, 0));
3147 #else
3148 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3149 #endif
3151 /* See how many bytes, if any, of its args a function should try to pop
3152 on return. */
3154 crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3155 crtl->args.size);
3157   /* For a stdarg.h function, save info about the
3158 regs and stack space used by the named args. */
3160 crtl->args.info = all.args_so_far;
3162 /* Set the rtx used for the function return value. Put this in its
3163 own variable so any optimizers that need this information don't have
3164 to include tree.h. Do this here so it gets done when an inlined
3165 function gets output. */
3167 crtl->return_rtx
3168 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3169 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3171 /* If scalar return value was computed in a pseudo-reg, or was a named
3172 return value that got dumped to the stack, copy that to the hard
3173 return register. */
3174 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3176 tree decl_result = DECL_RESULT (fndecl);
3177 rtx decl_rtl = DECL_RTL (decl_result);
3179 if (REG_P (decl_rtl)
3180 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3181 : DECL_REGISTER (decl_result))
3183 rtx real_decl_rtl;
3185 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3186 fndecl, true);
3187 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3188 /* The delay slot scheduler assumes that crtl->return_rtx
3189 holds the hard register containing the return value, not a
3190 temporary pseudo. */
3191 crtl->return_rtx = real_decl_rtl;
3196 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3197 For all seen types, gimplify their sizes. */
3199 static tree
3200 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3202 tree t = *tp;
3204 *walk_subtrees = 0;
3205 if (TYPE_P (t))
3207 if (POINTER_TYPE_P (t))
3208 *walk_subtrees = 1;
3209 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3210 && !TYPE_SIZES_GIMPLIFIED (t))
3212 gimplify_type_sizes (t, (gimple_seq *) data);
3213 *walk_subtrees = 1;
3217 return NULL;
3220 /* Gimplify the parameter list for current_function_decl. This involves
3221 evaluating SAVE_EXPRs of variable sized parameters and generating code
3222    to implement callee-copied reference parameters.  Returns a sequence of
3223 statements to add to the beginning of the function. */
3225 gimple_seq
3226 gimplify_parameters (void)
3228 struct assign_parm_data_all all;
3229 tree fnargs, parm;
3230 gimple_seq stmts = NULL;
3232 assign_parms_initialize_all (&all);
3233 fnargs = assign_parms_augmented_arg_list (&all);
3235 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3237 struct assign_parm_data_one data;
3239 /* Extract the type of PARM; adjust it according to ABI. */
3240 assign_parm_find_data_types (&all, parm, &data);
3242 /* Early out for errors and void parameters. */
3243 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3244 continue;
3246 /* Update info on where next arg arrives in registers. */
3247 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3248 data.passed_type, data.named_arg);
3250 /* ??? Once upon a time variable_size stuffed parameter list
3251 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3252 turned out to be less than manageable in the gimple world.
3253 Now we have to hunt them down ourselves. */
3254 walk_tree_without_duplicates (&data.passed_type,
3255 gimplify_parm_type, &stmts);
3257 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3259 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3260 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3263 if (data.passed_pointer)
3265 tree type = TREE_TYPE (data.passed_type);
3266 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3267 type, data.named_arg))
3269 tree local, t;
3271 /* For constant-sized objects, this is trivial; for
3272 variable-sized objects, we have to play games. */
3273 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3274 && !(flag_stack_check == GENERIC_STACK_CHECK
3275 && compare_tree_int (DECL_SIZE_UNIT (parm),
3276 STACK_CHECK_MAX_VAR_SIZE) > 0))
3278 local = create_tmp_var (type, get_name (parm));
3279 DECL_IGNORED_P (local) = 0;
3280 /* If PARM was addressable, move that flag over
3281 to the local copy, as its address will be taken,
3282                 not the PARM's.  */
3283 if (TREE_ADDRESSABLE (parm))
3285 TREE_ADDRESSABLE (parm) = 0;
3286 TREE_ADDRESSABLE (local) = 1;
3289 else
3291 tree ptr_type, addr;
3293 ptr_type = build_pointer_type (type);
3294 addr = create_tmp_var (ptr_type, get_name (parm));
3295 DECL_IGNORED_P (addr) = 0;
3296 local = build_fold_indirect_ref (addr);
3298 t = built_in_decls[BUILT_IN_ALLOCA];
3299 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3300 t = fold_convert (ptr_type, t);
3301 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3302 gimplify_and_add (t, &stmts);
3305 gimplify_assign (local, parm, &stmts);
3307 SET_DECL_VALUE_EXPR (parm, local);
3308 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3313 return stmts;
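/* For a by-reference parameter that the ABI says is callee copied, the
   sequence returned here materializes a local copy: a constant-sized type
   gets a temporary and a plain assignment from the parameter, a
   variable-sized type first obtains storage from the alloca builtin, and
   DECL_VALUE_EXPR then redirects every later use of the parameter to that
   copy.  */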
3316 /* Compute the size and offset from the start of the stacked arguments for a
3317 parm passed in mode PASSED_MODE and with type TYPE.
3319 INITIAL_OFFSET_PTR points to the current offset into the stacked
3320 arguments.
3322 The starting offset and size for this parm are returned in
3323 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3324    nonzero, the offset is that of the stack slot, which is returned in
3325 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3326 padding required from the initial offset ptr to the stack slot.
3328 IN_REGS is nonzero if the argument will be passed in registers. It will
3329 never be set if REG_PARM_STACK_SPACE is not defined.
3331 FNDECL is the function in which the argument was defined.
3333 There are two types of rounding that are done. The first, controlled by
3334 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3335 list to be aligned to the specific boundary (in bits). This rounding
3336 affects the initial and starting offsets, but not the argument size.
3338 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3339 optionally rounds the size of the parm to PARM_BOUNDARY. The
3340 initial offset is not affected by this rounding, while the size always
3341 is and the starting offset may be. */
3343 /* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3344 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3345 callers pass in the total size of args so far as
3346 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3348 void
3349 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3350 int partial, tree fndecl ATTRIBUTE_UNUSED,
3351 struct args_size *initial_offset_ptr,
3352 struct locate_and_pad_arg_data *locate)
3354 tree sizetree;
3355 enum direction where_pad;
3356 unsigned int boundary;
3357 int reg_parm_stack_space = 0;
3358 int part_size_in_regs;
3360 #ifdef REG_PARM_STACK_SPACE
3361 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3363 /* If we have found a stack parm before we reach the end of the
3364 area reserved for registers, skip that area. */
3365 if (! in_regs)
3367 if (reg_parm_stack_space > 0)
3369 if (initial_offset_ptr->var)
3371 initial_offset_ptr->var
3372 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3373 ssize_int (reg_parm_stack_space));
3374 initial_offset_ptr->constant = 0;
3376 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3377 initial_offset_ptr->constant = reg_parm_stack_space;
3380 #endif /* REG_PARM_STACK_SPACE */
3382 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3384 sizetree
3385 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3386 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3387 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3388 locate->where_pad = where_pad;
3390 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3391 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3392 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3394 locate->boundary = boundary;
3396 if (SUPPORTS_STACK_ALIGNMENT)
3398 /* stack_alignment_estimated can't change after stack has been
3399 realigned. */
3400 if (crtl->stack_alignment_estimated < boundary)
3402 if (!crtl->stack_realign_processed)
3403 crtl->stack_alignment_estimated = boundary;
3404 else
3406 /* If stack is realigned and stack alignment value
3407 hasn't been finalized, it is OK not to increase
3408 stack_alignment_estimated. The bigger alignment
3409 requirement is recorded in stack_alignment_needed
3410 below. */
3411 gcc_assert (!crtl->stack_realign_finalized
3412 && crtl->stack_realign_needed);
3417 /* Remember if the outgoing parameter requires extra alignment on the
3418 calling function side. */
3419 if (crtl->stack_alignment_needed < boundary)
3420 crtl->stack_alignment_needed = boundary;
3421 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
3422 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
3423 if (crtl->preferred_stack_boundary < boundary)
3424 crtl->preferred_stack_boundary = boundary;
3426 #ifdef ARGS_GROW_DOWNWARD
3427 locate->slot_offset.constant = -initial_offset_ptr->constant;
3428 if (initial_offset_ptr->var)
3429 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3430 initial_offset_ptr->var);
3433 tree s2 = sizetree;
3434 if (where_pad != none
3435 && (!host_integerp (sizetree, 1)
3436 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3437 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3438 SUB_PARM_SIZE (locate->slot_offset, s2);
3441 locate->slot_offset.constant += part_size_in_regs;
3443 if (!in_regs
3444 #ifdef REG_PARM_STACK_SPACE
3445 || REG_PARM_STACK_SPACE (fndecl) > 0
3446 #endif
3448 pad_to_arg_alignment (&locate->slot_offset, boundary,
3449 &locate->alignment_pad);
3451 locate->size.constant = (-initial_offset_ptr->constant
3452 - locate->slot_offset.constant);
3453 if (initial_offset_ptr->var)
3454 locate->size.var = size_binop (MINUS_EXPR,
3455 size_binop (MINUS_EXPR,
3456 ssize_int (0),
3457 initial_offset_ptr->var),
3458 locate->slot_offset.var);
3460 /* Pad_below needs the pre-rounded size to know how much to pad
3461 below. */
3462 locate->offset = locate->slot_offset;
3463 if (where_pad == downward)
3464 pad_below (&locate->offset, passed_mode, sizetree);
3466 #else /* !ARGS_GROW_DOWNWARD */
3467 if (!in_regs
3468 #ifdef REG_PARM_STACK_SPACE
3469 || REG_PARM_STACK_SPACE (fndecl) > 0
3470 #endif
3472 pad_to_arg_alignment (initial_offset_ptr, boundary,
3473 &locate->alignment_pad);
3474 locate->slot_offset = *initial_offset_ptr;
3476 #ifdef PUSH_ROUNDING
3477 if (passed_mode != BLKmode)
3478 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3479 #endif
3481 /* Pad_below needs the pre-rounded size to know how much to pad below
3482 so this must be done before rounding up. */
3483 locate->offset = locate->slot_offset;
3484 if (where_pad == downward)
3485 pad_below (&locate->offset, passed_mode, sizetree);
3487 if (where_pad != none
3488 && (!host_integerp (sizetree, 1)
3489 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3490 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3492 ADD_PARM_SIZE (locate->size, sizetree);
3494 locate->size.constant -= part_size_in_regs;
3495 #endif /* ARGS_GROW_DOWNWARD */
3497 #ifdef FUNCTION_ARG_OFFSET
3498 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3499 #endif
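/* A small example of the size rounding: a 6-byte BLKmode argument whose
   padding direction is not "none", with a 32-bit PARM_BOUNDARY, has its
   size rounded up to 8 bytes; its starting offset is separately aligned to
   the argument's FUNCTION_ARG_BOUNDARY by pad_to_arg_alignment.  */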
3502 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3503 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3505 static void
3506 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3507 struct args_size *alignment_pad)
3509 tree save_var = NULL_TREE;
3510 HOST_WIDE_INT save_constant = 0;
3511 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3512 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3514 #ifdef SPARC_STACK_BOUNDARY_HACK
3515 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3516 the real alignment of %sp. However, when it does this, the
3517 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3518 if (SPARC_STACK_BOUNDARY_HACK)
3519 sp_offset = 0;
3520 #endif
3522 if (boundary > PARM_BOUNDARY)
3524 save_var = offset_ptr->var;
3525 save_constant = offset_ptr->constant;
3528 alignment_pad->var = NULL_TREE;
3529 alignment_pad->constant = 0;
3531 if (boundary > BITS_PER_UNIT)
3533 if (offset_ptr->var)
3535 tree sp_offset_tree = ssize_int (sp_offset);
3536 tree offset = size_binop (PLUS_EXPR,
3537 ARGS_SIZE_TREE (*offset_ptr),
3538 sp_offset_tree);
3539 #ifdef ARGS_GROW_DOWNWARD
3540 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3541 #else
3542 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3543 #endif
3545 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3546 /* ARGS_SIZE_TREE includes constant term. */
3547 offset_ptr->constant = 0;
3548 if (boundary > PARM_BOUNDARY)
3549 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3550 save_var);
3552 else
3554 offset_ptr->constant = -sp_offset +
3555 #ifdef ARGS_GROW_DOWNWARD
3556 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3557 #else
3558 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3559 #endif
3560 if (boundary > PARM_BOUNDARY)
3561 alignment_pad->constant = offset_ptr->constant - save_constant;
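/* For example, in the args-grow-upward case, a constant offset of 20 with
   a 128-bit boundary and a zero STACK_POINTER_OFFSET is rounded up to
   CEIL_ROUND (20, 16) == 32; assuming PARM_BOUNDARY is smaller than 128,
   ALIGNMENT_PAD records the 12 bytes of padding that were inserted.  */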
3566 static void
3567 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3569 if (passed_mode != BLKmode)
3571 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3572 offset_ptr->constant
3573 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3574 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3575 - GET_MODE_SIZE (passed_mode));
3577 else
3579 if (TREE_CODE (sizetree) != INTEGER_CST
3580 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3582 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3583 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3584 /* Add it in. */
3585 ADD_PARM_SIZE (*offset_ptr, s2);
3586 SUB_PARM_SIZE (*offset_ptr, sizetree);
3592 /* True if register REGNO was alive at a place where `setjmp' was
3593 called and was set more than once or is an argument. Such regs may
3594 be clobbered by `longjmp'. */
3596 static bool
3597 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3599 /* There appear to be cases where some local vars never reach the
3600 backend but have bogus regnos. */
3601 if (regno >= max_reg_num ())
3602 return false;
3604 return ((REG_N_SETS (regno) > 1
3605 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3606 && REGNO_REG_SET_P (setjmp_crosses, regno));
3609 /* Walk the tree of blocks describing the binding levels within a
3610    function and warn about variables that might be killed by setjmp or
3611    vfork.  This is done after calling flow_analysis and before register
3612 allocation since that will clobber the pseudo-regs to hard
3613 regs. */
3615 static void
3616 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3618 tree decl, sub;
3620 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3622 if (TREE_CODE (decl) == VAR_DECL
3623 && DECL_RTL_SET_P (decl)
3624 && REG_P (DECL_RTL (decl))
3625 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3626 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3627 " %<longjmp%> or %<vfork%>", decl);
3630 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3631 setjmp_vars_warning (setjmp_crosses, sub);
3634 /* Do the appropriate part of setjmp_vars_warning
3635 but for arguments instead of local variables. */
3637 static void
3638 setjmp_args_warning (bitmap setjmp_crosses)
3640 tree decl;
3641 for (decl = DECL_ARGUMENTS (current_function_decl);
3642 decl; decl = TREE_CHAIN (decl))
3643 if (DECL_RTL (decl) != 0
3644 && REG_P (DECL_RTL (decl))
3645 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3646 warning (OPT_Wclobbered,
3647 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3648 decl);
3651 /* Generate warning messages for variables live across setjmp. */
3653 void
3654 generate_setjmp_warnings (void)
3656 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3658 if (n_basic_blocks == NUM_FIXED_BLOCKS
3659 || bitmap_empty_p (setjmp_crosses))
3660 return;
3662 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3663 setjmp_args_warning (setjmp_crosses);
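/* As a rough illustration of the user code these warnings are about
   (a hypothetical example; whether the warning actually fires depends
   on the optimization level and on register allocation):

     #include <setjmp.h>
     extern jmp_buf env;
     extern int compute (void);

     int f (int arg)
     {
       int local = 0;
       if (setjmp (env) == 0)
         local = compute ();

       return local + arg;
     }

   LOCAL is set more than once and ARG is an argument, and both may sit
   in pseudo-registers that cross the setjmp call, so both are
   candidates for the -Wclobbered warnings emitted above.  */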
3667 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3668 and create duplicate blocks. */
3669 /* ??? Need an option to either create block fragments or to create
3670 abstract origin duplicates of a source block. It really depends
3671 on what optimization has been performed. */
3673 void
3674 reorder_blocks (void)
3676 tree block = DECL_INITIAL (current_function_decl);
3677 VEC(tree,heap) *block_stack;
3679 if (block == NULL_TREE)
3680 return;
3682 block_stack = VEC_alloc (tree, heap, 10);
3684 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3685 clear_block_marks (block);
3687 /* Prune the old trees away, so that they don't get in the way. */
3688 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3689 BLOCK_CHAIN (block) = NULL_TREE;
3691 /* Recreate the block tree from the note nesting. */
3692 reorder_blocks_1 (get_insns (), block, &block_stack);
3693 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3695 VEC_free (tree, heap, block_stack);
3698 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3700 void
3701 clear_block_marks (tree block)
3703 while (block)
3705 TREE_ASM_WRITTEN (block) = 0;
3706 clear_block_marks (BLOCK_SUBBLOCKS (block));
3707 block = BLOCK_CHAIN (block);
3711 static void
3712 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3714 rtx insn;
3716 for (insn = insns; insn; insn = NEXT_INSN (insn))
3718 if (NOTE_P (insn))
3720 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3722 tree block = NOTE_BLOCK (insn);
3723 tree origin;
3725 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3726 ? BLOCK_FRAGMENT_ORIGIN (block)
3727 : block);
3729 /* If we have seen this block before, that means it now
3730 spans multiple address regions. Create a new fragment. */
3731 if (TREE_ASM_WRITTEN (block))
3733 tree new_block = copy_node (block);
3735 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3736 BLOCK_FRAGMENT_CHAIN (new_block)
3737 = BLOCK_FRAGMENT_CHAIN (origin);
3738 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3740 NOTE_BLOCK (insn) = new_block;
3741 block = new_block;
3744 BLOCK_SUBBLOCKS (block) = 0;
3745 TREE_ASM_WRITTEN (block) = 1;
3746 /* When there's only one block for the entire function,
3747 current_block == block and we mustn't do this, as it
3748 would cause infinite recursion.  */
3749 if (block != current_block)
3751 if (block != origin)
3752 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3754 BLOCK_SUPERCONTEXT (block) = current_block;
3755 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3756 BLOCK_SUBBLOCKS (current_block) = block;
3757 current_block = origin;
3759 VEC_safe_push (tree, heap, *p_block_stack, block);
3761 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3763 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3764 BLOCK_SUBBLOCKS (current_block)
3765 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3766 current_block = BLOCK_SUPERCONTEXT (current_block);
3772 /* Reverse the order of elements in the chain T of blocks,
3773 and return the new head of the chain (old last element). */
3775 tree
3776 blocks_nreverse (tree t)
3778 tree prev = 0, decl, next;
3779 for (decl = t; decl; decl = next)
3781 next = BLOCK_CHAIN (decl);
3782 BLOCK_CHAIN (decl) = prev;
3783 prev = decl;
3785 return prev;
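/* Illustrative use (mirroring the call in reorder_blocks above):

     BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

   turns a chain  A -> B -> C -> NULL  into  C -> B -> A -> NULL,
   reusing the existing BLOCK nodes and rewriting only their
   BLOCK_CHAIN links.  */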
3788 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3789 non-NULL, list them all into VECTOR, in a depth-first preorder
3790 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3791 blocks. */
3793 static int
3794 all_blocks (tree block, tree *vector)
3796 int n_blocks = 0;
3798 while (block)
3800 TREE_ASM_WRITTEN (block) = 0;
3802 /* Record this block. */
3803 if (vector)
3804 vector[n_blocks] = block;
3806 ++n_blocks;
3808 /* Record the subblocks, and their subblocks... */
3809 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3810 vector ? vector + n_blocks : 0);
3811 block = BLOCK_CHAIN (block);
3814 return n_blocks;
3817 /* Return a vector containing all the blocks rooted at BLOCK. The
3818 number of elements in the vector is stored in N_BLOCKS_P. The
3819 vector is dynamically allocated; it is the caller's responsibility
3820 to call `free' on the pointer returned. */
3822 static tree *
3823 get_block_vector (tree block, int *n_blocks_p)
3825 tree *block_vector;
3827 *n_blocks_p = all_blocks (block, NULL);
3828 block_vector = XNEWVEC (tree, *n_blocks_p);
3829 all_blocks (block, block_vector);
3831 return block_vector;
3834 static GTY(()) int next_block_index = 2;
3836 /* Set BLOCK_NUMBER for all the blocks in FN. */
3838 void
3839 number_blocks (tree fn)
3841 int i;
3842 int n_blocks;
3843 tree *block_vector;
3845 /* For SDB and XCOFF debugging output, we start numbering the blocks
3846 from 1 within each function, rather than keeping a running
3847 count. */
3848 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3849 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3850 next_block_index = 1;
3851 #endif
3853 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3855 /* The top-level BLOCK isn't numbered at all. */
3856 for (i = 1; i < n_blocks; ++i)
3857 /* We number the blocks from two. */
3858 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3860 free (block_vector);
3862 return;
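/* For example (illustrative only): with DWARF debug info the index is a
   running count across the translation unit, so the first function's
   subblocks might be numbered 2, 3, 4 and the next function's 5, 6, and
   so on, while with SDB or XCOFF the count restarts at 1 for every
   function.  In either case the outermost BLOCK of a function is never
   numbered.  */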
3865 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3867 tree
3868 debug_find_var_in_block_tree (tree var, tree block)
3870 tree t;
3872 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3873 if (t == var)
3874 return block;
3876 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3878 tree ret = debug_find_var_in_block_tree (var, t);
3879 if (ret)
3880 return ret;
3883 return NULL_TREE;
3886 /* Keep track of whether we're in a dummy function context. If we are,
3887 we don't want to invoke the set_current_function hook, because we'll
3888 get into trouble if the hook calls target_reinit () recursively or
3889 when the initial initialization is not yet complete. */
3891 static bool in_dummy_function;
3893 /* Invoke the target hook when setting cfun. Update the optimization options
3894 if the function uses different options than the default. */
3896 static void
3897 invoke_set_current_function_hook (tree fndecl)
3899 if (!in_dummy_function)
3901 tree opts = ((fndecl)
3902 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
3903 : optimization_default_node);
3905 if (!opts)
3906 opts = optimization_default_node;
3908 /* Change optimization options if needed. */
3909 if (optimization_current_node != opts)
3911 optimization_current_node = opts;
3912 cl_optimization_restore (TREE_OPTIMIZATION (opts));
3915 targetm.set_current_function (fndecl);
3919 /* cfun should never be set directly; use this function. */
3921 void
3922 set_cfun (struct function *new_cfun)
3924 if (cfun != new_cfun)
3926 cfun = new_cfun;
3927 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3931 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3933 static VEC(function_p,heap) *cfun_stack;
3935 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3937 void
3938 push_cfun (struct function *new_cfun)
3940 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3941 set_cfun (new_cfun);
3944 /* Pop cfun from the stack. */
3946 void
3947 pop_cfun (void)
3949 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3950 set_cfun (new_cfun);
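/* A typical caller pattern (an illustrative sketch; FNDECL stands for
   whatever function the caller wants to operate on):

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... inspect or emit code for FNDECL with cfun set appropriately ...
     pop_cfun ();
 */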
3953 /* Return the current value of funcdef_no and increment it.  */
3955 get_next_funcdef_no (void)
3957 return funcdef_no++;
3960 /* Allocate a function structure for FNDECL and set its contents
3961 to the defaults. Set cfun to the newly-allocated object.
3962 Some of the helper functions invoked during initialization assume
3963 that cfun has already been set. Therefore, assign the new object
3964 directly into cfun and invoke the back end hook explicitly at the
3965 very end, rather than initializing a temporary and calling set_cfun
3966 on it.
3968 ABSTRACT_P is true if this is a function that will never be seen by
3969 the middle-end. Such functions are front-end concepts (like C++
3970 function templates) that do not correspond directly to functions
3971 placed in object files. */
3973 void
3974 allocate_struct_function (tree fndecl, bool abstract_p)
3976 tree result;
3977 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3979 cfun = GGC_CNEW (struct function);
3981 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3983 init_eh_for_function ();
3985 if (init_machine_status)
3986 cfun->machine = (*init_machine_status) ();
3988 #ifdef OVERRIDE_ABI_FORMAT
3989 OVERRIDE_ABI_FORMAT (fndecl);
3990 #endif
3992 invoke_set_current_function_hook (fndecl);
3994 if (fndecl != NULL_TREE)
3996 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3997 cfun->decl = fndecl;
3998 current_function_funcdef_no = get_next_funcdef_no ();
4000 result = DECL_RESULT (fndecl);
4001 if (!abstract_p && aggregate_value_p (result, fndecl))
4003 #ifdef PCC_STATIC_STRUCT_RETURN
4004 cfun->returns_pcc_struct = 1;
4005 #endif
4006 cfun->returns_struct = 1;
4009 cfun->stdarg
4010 = (fntype
4011 && TYPE_ARG_TYPES (fntype) != 0
4012 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4013 != void_type_node));
4015 /* Assume all registers in stdarg functions need to be saved. */
4016 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4017 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
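/* The stdarg test above distinguishes, for illustration:

     int f (int a, ...);   the last TYPE_ARG_TYPES value is not
                           void_type_node, so cfun->stdarg is set;
     int g (int a);        the argument list is terminated by
                           void_type_node, so cfun->stdarg is clear;
     int h ();             unprototyped, TYPE_ARG_TYPES is 0, so
                           cfun->stdarg is clear as well.  */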
4021 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4022 instead of just setting it. */
4024 void
4025 push_struct_function (tree fndecl)
4027 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4028 allocate_struct_function (fndecl, false);
4031 /* Reset cfun, and other non-struct-function variables to defaults as
4032 appropriate for emitting rtl at the start of a function. */
4034 static void
4035 prepare_function_start (void)
4037 gcc_assert (!crtl->emit.x_last_insn);
4038 init_emit ();
4039 init_varasm_status ();
4040 init_expr ();
4041 default_rtl_profile ();
4043 cse_not_expected = ! optimize;
4045 /* Caller save not needed yet. */
4046 caller_save_needed = 0;
4048 /* We haven't done register allocation yet. */
4049 reg_renumber = 0;
4051 /* Indicate that we have not instantiated virtual registers yet. */
4052 virtuals_instantiated = 0;
4054 /* Indicate that we want CONCATs now. */
4055 generating_concat_p = 1;
4057 /* Indicate we have no need of a frame pointer yet. */
4058 frame_pointer_needed = 0;
4061 /* Initialize the rtl expansion mechanism so that we can do simple things
4062 like generate sequences. This is used to provide a context during global
4063 initialization of some passes. You must call expand_dummy_function_end
4064 to exit this context. */
4066 void
4067 init_dummy_function_start (void)
4069 gcc_assert (!in_dummy_function);
4070 in_dummy_function = true;
4071 push_struct_function (NULL_TREE);
4072 prepare_function_start ();
4075 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4076 and initialize static variables for generating RTL for the statements
4077 of the function. */
4079 void
4080 init_function_start (tree subr)
4082 if (subr && DECL_STRUCT_FUNCTION (subr))
4083 set_cfun (DECL_STRUCT_FUNCTION (subr));
4084 else
4085 allocate_struct_function (subr, false);
4086 prepare_function_start ();
4088 /* Warn if this value is an aggregate type,
4089 regardless of which calling convention we are using for it. */
4090 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4091 warning (OPT_Waggregate_return, "function returns an aggregate");
4094 /* Make sure all values used by the optimization passes have sane
4095 defaults. */
4096 unsigned int
4097 init_function_for_compilation (void)
4099 reg_renumber = 0;
4101 /* No prologue/epilogue insns yet. Make sure that these vectors are
4102 empty. */
4103 gcc_assert (VEC_length (int, prologue) == 0);
4104 gcc_assert (VEC_length (int, epilogue) == 0);
4105 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
4106 return 0;
4109 struct rtl_opt_pass pass_init_function =
4112 RTL_PASS,
4113 NULL, /* name */
4114 NULL, /* gate */
4115 init_function_for_compilation, /* execute */
4116 NULL, /* sub */
4117 NULL, /* next */
4118 0, /* static_pass_number */
4119 0, /* tv_id */
4120 0, /* properties_required */
4121 0, /* properties_provided */
4122 0, /* properties_destroyed */
4123 0, /* todo_flags_start */
4124 0 /* todo_flags_finish */
4129 void
4130 expand_main_function (void)
4132 #if (defined(INVOKE__main) \
4133 || (!defined(HAS_INIT_SECTION) \
4134 && !defined(INIT_SECTION_ASM_OP) \
4135 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4136 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4137 #endif
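/* Conceptually (an illustrative sketch, not a literal source-level
   transformation): on targets without init sections this turns

     int main (void) { body; }

   into

     int main (void) { __main (); body; }

   where the library routine named by NAME__MAIN typically runs the
   static constructors before the user's code starts.  */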
4140 /* Expand code to initialize the stack_protect_guard. This is invoked at
4141 the beginning of a function to be protected. */
4143 #ifndef HAVE_stack_protect_set
4144 # define HAVE_stack_protect_set 0
4145 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4146 #endif
4148 void
4149 stack_protect_prologue (void)
4151 tree guard_decl = targetm.stack_protect_guard ();
4152 rtx x, y;
4154 /* Avoid expand_expr here, because we don't want guard_decl pulled
4155 into registers unless absolutely necessary. And we know that
4156 crtl->stack_protect_guard is a local stack slot, so this skips
4157 all the fluff. */
4158 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4159 y = validize_mem (DECL_RTL (guard_decl));
4161 /* Allow the target to copy from Y to X without leaking Y into a
4162 register. */
4163 if (HAVE_stack_protect_set)
4165 rtx insn = gen_stack_protect_set (x, y);
4166 if (insn)
4168 emit_insn (insn);
4169 return;
4173 /* Otherwise do a straight move. */
4174 emit_move_insn (x, y);
4177 /* Expand code to verify the stack_protect_guard. This is invoked at
4178 the end of a function to be protected. */
4180 #ifndef HAVE_stack_protect_test
4181 # define HAVE_stack_protect_test 0
4182 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4183 #endif
4185 void
4186 stack_protect_epilogue (void)
4188 tree guard_decl = targetm.stack_protect_guard ();
4189 rtx label = gen_label_rtx ();
4190 rtx x, y, tmp;
4192 /* Avoid expand_expr here, because we don't want guard_decl pulled
4193 into registers unless absolutely necessary. And we know that
4194 crtl->stack_protect_guard is a local stack slot, so this skips
4195 all the fluff. */
4196 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4197 y = validize_mem (DECL_RTL (guard_decl));
4199 /* Allow the target to compare Y with X without leaking either into
4200 a register. */
4201 switch (HAVE_stack_protect_test != 0)
4203 case 1:
4204 tmp = gen_stack_protect_test (x, y, label);
4205 if (tmp)
4207 emit_insn (tmp);
4208 break;
4210 /* FALLTHRU */
4212 default:
4213 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4214 break;
4217 /* The noreturn predictor has been moved to the tree level.  The rtl-level
4218 predictors estimate the probability of this branch at about 20%, which
4219 isn't enough to get things moved out of line.  Since this is the only
4220 extant case of adding a noreturn function at the rtl level, it doesn't
4221 seem worth doing anything except adding the prediction by hand.  */
4222 tmp = get_last_insn ();
4223 if (JUMP_P (tmp))
4224 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4226 expand_expr_stmt (targetm.stack_protect_fail ());
4227 emit_label (label);
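/* Taken together, stack_protect_prologue and stack_protect_epilogue
   implement roughly the following C-level scheme (an illustrative
   sketch; the actual guard symbol and failure hook come from
   targetm.stack_protect_guard and targetm.stack_protect_fail, for
   example __stack_chk_guard and __stack_chk_fail with the common
   glibc-style implementation):

     void f (void)
     {
       long canary = guard;
       ... function body, local buffers ...
       if (canary != guard)
         fail ();
     }

   The prologue performs the initial copy without letting the guard
   value leak into a register; the epilogue compares and branches to
   the failure call, which is predicted not taken.  */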
4230 /* Start the RTL for a new function, and set variables used for
4231 emitting RTL.
4232 SUBR is the FUNCTION_DECL node for the function being compiled.  */
4236 void
4237 expand_function_start (tree subr)
4239 /* Make sure volatile mem refs aren't considered
4240 valid operands of arithmetic insns. */
4241 init_recog_no_volatile ();
4243 crtl->profile
4244 = (profile_flag
4245 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4247 crtl->limit_stack
4248 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4250 /* Make the label for return statements to jump to. Do not special
4251 case machines with special return instructions -- they will be
4252 handled later during jump, ifcvt, or epilogue creation. */
4253 return_label = gen_label_rtx ();
4255 /* Initialize rtx used to return the value. */
4256 /* Do this before assign_parms so that we copy the struct value address
4257 before any library calls that assign parms might generate. */
4259 /* Decide whether to return the value in memory or in a register. */
4260 if (aggregate_value_p (DECL_RESULT (subr), subr))
4262 /* Returning something that won't go in a register. */
4263 rtx value_address = 0;
4265 #ifdef PCC_STATIC_STRUCT_RETURN
4266 if (cfun->returns_pcc_struct)
4268 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4269 value_address = assemble_static_space (size);
4271 else
4272 #endif
4274 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4275 /* Expect to be passed the address of a place to store the value.
4276 If it is passed as an argument, assign_parms will take care of
4277 it. */
4278 if (sv)
4280 value_address = gen_reg_rtx (Pmode);
4281 emit_move_insn (value_address, sv);
4284 if (value_address)
4286 rtx x = value_address;
4287 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4289 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4290 set_mem_attributes (x, DECL_RESULT (subr), 1);
4292 SET_DECL_RTL (DECL_RESULT (subr), x);
4295 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4296 /* If return mode is void, this decl rtl should not be used. */
4297 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4298 else
4300 /* Compute the return values into a pseudo reg, which we will copy
4301 into the true return register after the cleanups are done. */
4302 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4303 if (TYPE_MODE (return_type) != BLKmode
4304 && targetm.calls.return_in_msb (return_type))
4305 /* expand_function_end will insert the appropriate padding in
4306 this case. Use the return value's natural (unpadded) mode
4307 within the function proper. */
4308 SET_DECL_RTL (DECL_RESULT (subr),
4309 gen_reg_rtx (TYPE_MODE (return_type)));
4310 else
4312 /* In order to figure out what mode to use for the pseudo, we
4313 figure out what the mode of the eventual return register will
4314 actually be, and use that. */
4315 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4317 /* Structures that are returned in registers are not
4318 aggregate_value_p, so we may see a PARALLEL or a REG. */
4319 if (REG_P (hard_reg))
4320 SET_DECL_RTL (DECL_RESULT (subr),
4321 gen_reg_rtx (GET_MODE (hard_reg)));
4322 else
4324 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4325 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4329 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4330 result to the real return register(s). */
4331 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4334 /* Initialize rtx for parameters and local variables.
4335 In some cases this requires emitting insns. */
4336 assign_parms (subr);
4338 /* If function gets a static chain arg, store it. */
4339 if (cfun->static_chain_decl)
4341 tree parm = cfun->static_chain_decl;
4342 rtx local = gen_reg_rtx (Pmode);
4344 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4345 SET_DECL_RTL (parm, local);
4346 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4348 emit_move_insn (local, static_chain_incoming_rtx);
4351 /* If the function receives a non-local goto, then store the
4352 bits we need to restore the frame pointer. */
4353 if (cfun->nonlocal_goto_save_area)
4355 tree t_save;
4356 rtx r_save;
4358 /* ??? We need to do this save early. Unfortunately here is
4359 before the frame variable gets declared. Help out... */
4360 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4361 if (!DECL_RTL_SET_P (var))
4362 expand_decl (var);
4364 t_save = build4 (ARRAY_REF, ptr_type_node,
4365 cfun->nonlocal_goto_save_area,
4366 integer_zero_node, NULL_TREE, NULL_TREE);
4367 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4368 r_save = convert_memory_address (Pmode, r_save);
4370 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4371 update_nonlocal_goto_save_area ();
4374 /* The following was moved from init_function_start.
4375 The move is supposed to make sdb output more accurate. */
4376 /* Indicate the beginning of the function body,
4377 as opposed to parm setup. */
4378 emit_note (NOTE_INSN_FUNCTION_BEG);
4380 gcc_assert (NOTE_P (get_last_insn ()));
4382 parm_birth_insn = get_last_insn ();
4384 if (crtl->profile)
4386 #ifdef PROFILE_HOOK
4387 PROFILE_HOOK (current_function_funcdef_no);
4388 #endif
4391 /* The stack checking probe should go right after the display
4392 initializations.  */
4393 if (flag_stack_check)
4394 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4396 /* Make sure there is a line number after the function entry setup code. */
4397 force_next_line_note ();
4400 /* Undo the effects of init_dummy_function_start. */
4401 void
4402 expand_dummy_function_end (void)
4404 gcc_assert (in_dummy_function);
4406 /* End any sequences that failed to be closed due to syntax errors. */
4407 while (in_sequence_p ())
4408 end_sequence ();
4410 /* Outside function body, can't compute type's actual size
4411 until next function's body starts. */
4413 free_after_parsing (cfun);
4414 free_after_compilation (cfun);
4415 pop_cfun ();
4416 in_dummy_function = false;
4419 /* Call DOIT for each hard register used as a return value from
4420 the current function. */
4422 void
4423 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4425 rtx outgoing = crtl->return_rtx;
4427 if (! outgoing)
4428 return;
4430 if (REG_P (outgoing))
4431 (*doit) (outgoing, arg);
4432 else if (GET_CODE (outgoing) == PARALLEL)
4434 int i;
4436 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4438 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4440 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4441 (*doit) (x, arg);
4446 static void
4447 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4449 emit_clobber (reg);
4452 void
4453 clobber_return_register (void)
4455 diddle_return_value (do_clobber_return_reg, NULL);
4457 /* In case we do use pseudo to return value, clobber it too. */
4458 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4460 tree decl_result = DECL_RESULT (current_function_decl);
4461 rtx decl_rtl = DECL_RTL (decl_result);
4462 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4464 do_clobber_return_reg (decl_rtl, NULL);
4469 static void
4470 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4472 emit_use (reg);
4475 static void
4476 use_return_register (void)
4478 diddle_return_value (do_use_return_reg, NULL);
4481 /* Possibly warn about unused parameters. */
4482 void
4483 do_warn_unused_parameter (tree fn)
4485 tree decl;
4487 for (decl = DECL_ARGUMENTS (fn);
4488 decl; decl = TREE_CHAIN (decl))
4489 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4490 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4491 && !TREE_NO_WARNING (decl))
4492 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
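/* For illustration, the kind of user code this warns about
   (a hypothetical example):

     int f (int used, int unused)
     {
       return used;
     }

   UNUSED is a named, non-artificial PARM_DECL that is never read, so it
   gets the -Wunused-parameter warning; an unnamed parameter, or one the
   front end has marked as not to be warned about, would not.  */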
4495 static GTY(()) rtx initial_trampoline;
4497 /* Generate RTL for the end of the current function. */
4499 void
4500 expand_function_end (void)
4502 rtx clobber_after;
4504 /* If arg_pointer_save_area was referenced only from a nested
4505 function, we will not have initialized it yet. Do that now. */
4506 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4507 get_arg_pointer_save_area ();
4509 /* If we are doing generic stack checking and this function makes calls,
4510 do a stack probe at the start of the function to ensure we have enough
4511 space for another stack frame. */
4512 if (flag_stack_check == GENERIC_STACK_CHECK)
4514 rtx insn, seq;
4516 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4517 if (CALL_P (insn))
4519 start_sequence ();
4520 probe_stack_range (STACK_OLD_CHECK_PROTECT,
4521 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4522 seq = get_insns ();
4523 end_sequence ();
4524 emit_insn_before (seq, stack_check_probe_note);
4525 break;
4529 /* End any sequences that failed to be closed due to syntax errors. */
4530 while (in_sequence_p ())
4531 end_sequence ();
4533 clear_pending_stack_adjust ();
4534 do_pending_stack_adjust ();
4536 /* Output a linenumber for the end of the function.
4537 SDB depends on this. */
4538 force_next_line_note ();
4539 set_curr_insn_source_location (input_location);
4541 /* Before the return label (if any), clobber the return
4542 registers so that they are not propagated live to the rest of
4543 the function. This can only happen with functions that drop
4544 through; if there had been a return statement, there would
4545 have either been a return rtx, or a jump to the return label.
4547 We delay the actual code generation until after the
4548 current_function_value_rtx is computed.  */
4549 clobber_after = get_last_insn ();
4551 /* Output the label for the actual return from the function. */
4552 emit_label (return_label);
4554 if (USING_SJLJ_EXCEPTIONS)
4556 /* Let except.c know where it should emit the call to unregister
4557 the function context for sjlj exceptions. */
4558 if (flag_exceptions)
4559 sjlj_emit_function_exit_after (get_last_insn ());
4561 else
4563 /* We want to ensure that instructions that may trap are not
4564 moved into the epilogue by scheduling, because we don't
4565 always emit unwind information for the epilogue. */
4566 if (flag_non_call_exceptions)
4567 emit_insn (gen_blockage ());
4570 /* If this is an implementation of throw, do what's necessary to
4571 communicate between __builtin_eh_return and the epilogue. */
4572 expand_eh_return ();
4574 /* If scalar return value was computed in a pseudo-reg, or was a named
4575 return value that got dumped to the stack, copy that to the hard
4576 return register. */
4577 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4579 tree decl_result = DECL_RESULT (current_function_decl);
4580 rtx decl_rtl = DECL_RTL (decl_result);
4582 if (REG_P (decl_rtl)
4583 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4584 : DECL_REGISTER (decl_result))
4586 rtx real_decl_rtl = crtl->return_rtx;
4588 /* This should be set in assign_parms. */
4589 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4591 /* If this is a BLKmode structure being returned in registers,
4592 then use the mode computed in expand_return. Note that if
4593 decl_rtl is memory, then its mode may have been changed,
4594 but that crtl->return_rtx has not. */
4595 if (GET_MODE (real_decl_rtl) == BLKmode)
4596 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4598 /* If a non-BLKmode return value should be padded at the least
4599 significant end of the register, shift it left by the appropriate
4600 amount. BLKmode results are handled using the group load/store
4601 machinery. */
4602 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4603 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4605 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4606 REGNO (real_decl_rtl)),
4607 decl_rtl);
4608 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4610 /* If a named return value dumped decl_return to memory, then
4611 we may need to re-do the PROMOTE_MODE signed/unsigned
4612 extension. */
4613 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4615 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4617 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4618 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4619 &unsignedp, 1);
4621 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4623 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4625 /* If expand_function_start has created a PARALLEL for decl_rtl,
4626 move the result to the real return registers. Otherwise, do
4627 a group load from decl_rtl for a named return. */
4628 if (GET_CODE (decl_rtl) == PARALLEL)
4629 emit_group_move (real_decl_rtl, decl_rtl);
4630 else
4631 emit_group_load (real_decl_rtl, decl_rtl,
4632 TREE_TYPE (decl_result),
4633 int_size_in_bytes (TREE_TYPE (decl_result)));
4635 /* In the case of complex integer modes smaller than a word, we'll
4636 need to generate some non-trivial bitfield insertions. Do that
4637 on a pseudo and not the hard register. */
4638 else if (GET_CODE (decl_rtl) == CONCAT
4639 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4640 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4642 int old_generating_concat_p;
4643 rtx tmp;
4645 old_generating_concat_p = generating_concat_p;
4646 generating_concat_p = 0;
4647 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4648 generating_concat_p = old_generating_concat_p;
4650 emit_move_insn (tmp, decl_rtl);
4651 emit_move_insn (real_decl_rtl, tmp);
4653 else
4654 emit_move_insn (real_decl_rtl, decl_rtl);
4658 /* If returning a structure, arrange to return the address of the value
4659 in a place where debuggers expect to find it.
4661 If returning a structure PCC style,
4662 the caller also depends on this value.
4663 And cfun->returns_pcc_struct is not necessarily set. */
4664 if (cfun->returns_struct
4665 || cfun->returns_pcc_struct)
4667 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4668 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4669 rtx outgoing;
4671 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4672 type = TREE_TYPE (type);
4673 else
4674 value_address = XEXP (value_address, 0);
4676 outgoing = targetm.calls.function_value (build_pointer_type (type),
4677 current_function_decl, true);
4679 /* Mark this as a function return value so integrate will delete the
4680 assignment and USE below when inlining this function. */
4681 REG_FUNCTION_VALUE_P (outgoing) = 1;
4683 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4684 value_address = convert_memory_address (GET_MODE (outgoing),
4685 value_address);
4687 emit_move_insn (outgoing, value_address);
4689 /* Show the return register used to hold the result (in this case the
4690 address of the result).  */
4691 crtl->return_rtx = outgoing;
4694 /* Emit the actual code to clobber return register. */
4696 rtx seq;
4698 start_sequence ();
4699 clobber_return_register ();
4700 expand_naked_return ();
4701 seq = get_insns ();
4702 end_sequence ();
4704 emit_insn_after (seq, clobber_after);
4707 /* Output the label for the naked return from the function. */
4708 emit_label (naked_return_label);
4710 /* @@@ This is a kludge. We want to ensure that instructions that
4711 may trap are not moved into the epilogue by scheduling, because
4712 we don't always emit unwind information for the epilogue. */
4713 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4714 emit_insn (gen_blockage ());
4716 /* If stack protection is enabled for this function, check the guard. */
4717 if (crtl->stack_protect_guard)
4718 stack_protect_epilogue ();
4720 /* If we had calls to alloca, and this machine needs
4721 an accurate stack pointer to exit the function,
4722 insert some code to save and restore the stack pointer. */
4723 if (! EXIT_IGNORE_STACK
4724 && cfun->calls_alloca)
4726 rtx tem = 0;
4728 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4729 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4732 /* ??? This should no longer be necessary since the stupid allocator is
4733 no longer with us, but some parts of the compiler (e.g. reload_combine
4734 and sh mach_dep_reorg) still try to compute their own lifetime info
4735 instead of using the general framework.  */
4736 use_return_register ();
4740 get_arg_pointer_save_area (void)
4742 rtx ret = arg_pointer_save_area;
4744 if (! ret)
4746 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4747 arg_pointer_save_area = ret;
4750 if (! crtl->arg_pointer_save_area_init)
4752 rtx seq;
4754 /* Save the arg pointer at the beginning of the function. The
4755 generated stack slot may not be a valid memory address, so we
4756 have to check it and fix it if necessary. */
4757 start_sequence ();
4758 emit_move_insn (validize_mem (ret),
4759 crtl->args.internal_arg_pointer);
4760 seq = get_insns ();
4761 end_sequence ();
4763 push_topmost_sequence ();
4764 emit_insn_after (seq, entry_of_function ());
4765 pop_topmost_sequence ();
4768 return ret;
4771 /* Extend a vector that records the INSN_UIDs of INSNS
4772 (a list of one or more insns). */
4774 static void
4775 record_insns (rtx insns, VEC(int,heap) **vecp)
4777 rtx tmp;
4779 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4780 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4783 /* Set the locator of the insn chain starting at INSN to LOC. */
4784 static void
4785 set_insn_locators (rtx insn, int loc)
4787 while (insn != NULL_RTX)
4789 if (INSN_P (insn))
4790 INSN_LOCATOR (insn) = loc;
4791 insn = NEXT_INSN (insn);
4795 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4796 be running after reorg, SEQUENCE rtl is possible. */
4798 static int
4799 contains (const_rtx insn, VEC(int,heap) **vec)
4801 int i, j;
4803 if (NONJUMP_INSN_P (insn)
4804 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4806 int count = 0;
4807 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4808 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4809 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4810 == VEC_index (int, *vec, j))
4811 count++;
4812 return count;
4814 else
4816 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4817 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4818 return 1;
4820 return 0;
4824 prologue_epilogue_contains (const_rtx insn)
4826 if (contains (insn, &prologue))
4827 return 1;
4828 if (contains (insn, &epilogue))
4829 return 1;
4830 return 0;
4834 sibcall_epilogue_contains (const_rtx insn)
4836 if (sibcall_epilogue)
4837 return contains (insn, &sibcall_epilogue);
4838 return 0;
4841 #ifdef HAVE_return
4842 /* Insert gen_return at the end of block BB. This also means updating
4843 block_for_insn appropriately. */
4845 static void
4846 emit_return_into_block (basic_block bb)
4848 emit_jump_insn_after (gen_return (), BB_END (bb));
4850 #endif /* HAVE_return */
4852 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4853 this into place with notes indicating where the prologue ends and where
4854 the epilogue begins. Update the basic block information when possible. */
4856 static void
4857 thread_prologue_and_epilogue_insns (void)
4859 int inserted = 0;
4860 edge e;
4861 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4862 rtx seq;
4863 #endif
4864 #if defined (HAVE_epilogue) || defined(HAVE_return)
4865 rtx epilogue_end = NULL_RTX;
4866 #endif
4867 edge_iterator ei;
4869 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4870 #ifdef HAVE_prologue
4871 if (HAVE_prologue)
4873 start_sequence ();
4874 seq = gen_prologue ();
4875 emit_insn (seq);
4877 /* Insert an explicit USE for the frame pointer
4878 if the profiling is on and the frame pointer is required. */
4879 if (crtl->profile && frame_pointer_needed)
4880 emit_use (hard_frame_pointer_rtx);
4882 /* Retain a map of the prologue insns. */
4883 record_insns (seq, &prologue);
4884 emit_note (NOTE_INSN_PROLOGUE_END);
4886 #ifndef PROFILE_BEFORE_PROLOGUE
4887 /* Ensure that instructions are not moved into the prologue when
4888 profiling is on. The call to the profiling routine can be
4889 emitted within the live range of a call-clobbered register. */
4890 if (crtl->profile)
4891 emit_insn (gen_blockage ());
4892 #endif
4894 seq = get_insns ();
4895 end_sequence ();
4896 set_insn_locators (seq, prologue_locator);
4898 /* Can't deal with multiple successors of the entry block
4899 at the moment. Function should always have at least one
4900 entry point. */
4901 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
4903 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
4904 inserted = 1;
4906 #endif
4908 /* If the exit block has no non-fake predecessors, we don't need
4909 an epilogue. */
4910 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4911 if ((e->flags & EDGE_FAKE) == 0)
4912 break;
4913 if (e == NULL)
4914 goto epilogue_done;
4916 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4917 #ifdef HAVE_return
4918 if (optimize && HAVE_return)
4920 /* If we're allowed to generate a simple return instruction,
4921 then by definition we don't need a full epilogue. Examine
4922 the block that falls through to EXIT. If it does not
4923 contain any code, examine its predecessors and try to
4924 emit (conditional) return instructions. */
4926 basic_block last;
4927 rtx label;
4929 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4930 if (e->flags & EDGE_FALLTHRU)
4931 break;
4932 if (e == NULL)
4933 goto epilogue_done;
4934 last = e->src;
4936 /* Verify that there are no active instructions in the last block. */
4937 label = BB_END (last);
4938 while (label && !LABEL_P (label))
4940 if (active_insn_p (label))
4941 break;
4942 label = PREV_INSN (label);
4945 if (BB_HEAD (last) == label && LABEL_P (label))
4947 edge_iterator ei2;
4949 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
4951 basic_block bb = e->src;
4952 rtx jump;
4954 if (bb == ENTRY_BLOCK_PTR)
4956 ei_next (&ei2);
4957 continue;
4960 jump = BB_END (bb);
4961 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
4963 ei_next (&ei2);
4964 continue;
4967 /* If we have an unconditional jump, we can replace that
4968 with a simple return instruction. */
4969 if (simplejump_p (jump))
4971 emit_return_into_block (bb);
4972 delete_insn (jump);
4975 /* If we have a conditional jump, we can try to replace
4976 that with a conditional return instruction. */
4977 else if (condjump_p (jump))
4979 if (! redirect_jump (jump, 0, 0))
4981 ei_next (&ei2);
4982 continue;
4985 /* If this block has only one successor, it both jumps
4986 and falls through to the fallthru block, so we can't
4987 delete the edge. */
4988 if (single_succ_p (bb))
4990 ei_next (&ei2);
4991 continue;
4994 else
4996 ei_next (&ei2);
4997 continue;
5000 /* Fix up the CFG for the successful change we just made. */
5001 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5004 /* Emit a return insn for the exit fallthru block. Whether
5005 this is still reachable will be determined later. */
5007 emit_barrier_after (BB_END (last));
5008 emit_return_into_block (last);
5009 epilogue_end = BB_END (last);
5010 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5011 goto epilogue_done;
5014 #endif
5015 /* Find the edge that falls through to EXIT. Other edges may exist
5016 due to RETURN instructions, but those don't need epilogues.
5017 There really shouldn't be a mixture -- either all should have
5018 been converted or none, however... */
5020 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5021 if (e->flags & EDGE_FALLTHRU)
5022 break;
5023 if (e == NULL)
5024 goto epilogue_done;
5026 #ifdef HAVE_epilogue
5027 if (HAVE_epilogue)
5029 start_sequence ();
5030 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5031 seq = gen_epilogue ();
5032 emit_jump_insn (seq);
5034 /* Retain a map of the epilogue insns. */
5035 record_insns (seq, &epilogue);
5036 set_insn_locators (seq, epilogue_locator);
5038 seq = get_insns ();
5039 end_sequence ();
5041 insert_insn_on_edge (seq, e);
5042 inserted = 1;
5044 else
5045 #endif
5047 basic_block cur_bb;
5049 if (! next_active_insn (BB_END (e->src)))
5050 goto epilogue_done;
5051 /* We have a fall-through edge to the exit block, the source is not
5052 at the end of the function, and there will be an assembler epilogue
5053 at the end of the function.
5054 We can't use force_nonfallthru here, because that would try to
5055 use return. Inserting a jump 'by hand' is extremely messy, so
5056 we take advantage of cfg_layout_finalize using
5057 fixup_fallthru_exit_predecessor. */
5058 cfg_layout_initialize (0);
5059 FOR_EACH_BB (cur_bb)
5060 if (cur_bb->index >= NUM_FIXED_BLOCKS
5061 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5062 cur_bb->aux = cur_bb->next_bb;
5063 cfg_layout_finalize ();
5065 epilogue_done:
5066 default_rtl_profile ();
5068 if (inserted)
5070 commit_edge_insertions ();
5072 /* The epilogue insns we inserted may cause the exit edge to no longer
5073 be fallthru. */
5074 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5076 if (((e->flags & EDGE_FALLTHRU) != 0)
5077 && returnjump_p (BB_END (e->src)))
5078 e->flags &= ~EDGE_FALLTHRU;
5082 #ifdef HAVE_sibcall_epilogue
5083 /* Emit sibling epilogues before any sibling call sites. */
5084 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5086 basic_block bb = e->src;
5087 rtx insn = BB_END (bb);
5089 if (!CALL_P (insn)
5090 || ! SIBLING_CALL_P (insn))
5092 ei_next (&ei);
5093 continue;
5096 start_sequence ();
5097 emit_insn (gen_sibcall_epilogue ());
5098 seq = get_insns ();
5099 end_sequence ();
5101 /* Retain a map of the epilogue insns. Used in life analysis to
5102 avoid getting rid of sibcall epilogue insns. Do this before we
5103 actually emit the sequence. */
5104 record_insns (seq, &sibcall_epilogue);
5105 set_insn_locators (seq, epilogue_locator);
5107 emit_insn_before (seq, insn);
5108 ei_next (&ei);
5110 #endif
5112 #ifdef HAVE_epilogue
5113 if (epilogue_end)
5115 rtx insn, next;
5117 /* Move any line notes that appear after the epilogue.  There is no
5118 need, however, to be particularly strict about the existence of such
5119 a note.  Also possibly move NOTE_INSN_FUNCTION_BEG notes, as those
5120 can be relevant for debug info generation.  */
5122 for (insn = epilogue_end; insn; insn = next)
5124 next = NEXT_INSN (insn);
5125 if (NOTE_P (insn)
5126 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5127 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5130 #endif
5132 /* Threading the prologue and epilogue changes the artificial refs
5133 in the entry and exit blocks. */
5134 epilogue_completed = 1;
5135 df_update_entry_exit_and_calls ();
5138 /* Reposition the prologue-end and epilogue-begin notes after instruction
5139 scheduling and delayed branch scheduling. */
5141 void
5142 reposition_prologue_and_epilogue_notes (void)
5144 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5145 rtx insn, last, note;
5146 int len;
5148 if ((len = VEC_length (int, prologue)) > 0)
5150 last = 0, note = 0;
5152 /* Scan from the beginning until we reach the last prologue insn.
5153 We apparently can't depend on basic_block_{head,end} after
5154 reorg has run. */
5155 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5157 if (NOTE_P (insn))
5159 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5160 note = insn;
5162 else if (contains (insn, &prologue))
5164 last = insn;
5165 if (--len == 0)
5166 break;
5170 if (last)
5172 /* Find the prologue-end note if we haven't already, and
5173 move it to just after the last prologue insn. */
5174 if (note == 0)
5176 for (note = last; (note = NEXT_INSN (note));)
5177 if (NOTE_P (note)
5178 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5179 break;
5182 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5183 if (LABEL_P (last))
5184 last = NEXT_INSN (last);
5185 reorder_insns (note, note, last);
5189 if ((len = VEC_length (int, epilogue)) > 0)
5191 last = 0, note = 0;
5193 /* Scan from the end until we reach the first epilogue insn.
5194 We apparently can't depend on basic_block_{head,end} after
5195 reorg has run. */
5196 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5198 if (NOTE_P (insn))
5200 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5201 note = insn;
5203 else if (contains (insn, &epilogue))
5205 last = insn;
5206 if (--len == 0)
5207 break;
5211 if (last)
5213 /* Find the epilogue-begin note if we haven't already, and
5214 move it to just before the first epilogue insn. */
5215 if (note == 0)
5217 for (note = insn; (note = PREV_INSN (note));)
5218 if (NOTE_P (note)
5219 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5220 break;
5223 if (PREV_INSN (last) != note)
5224 reorder_insns (note, note, PREV_INSN (last));
5227 #endif /* HAVE_prologue or HAVE_epilogue */
5230 /* Returns the name of the current function. */
5231 const char *
5232 current_function_name (void)
5234 return lang_hooks.decl_printable_name (cfun->decl, 2);
5237 /* Returns the raw (mangled) name of the current function. */
5238 const char *
5239 current_function_assembler_name (void)
5241 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5245 static unsigned int
5246 rest_of_handle_check_leaf_regs (void)
5248 #ifdef LEAF_REGISTERS
5249 current_function_uses_only_leaf_regs
5250 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5251 #endif
5252 return 0;
5255 /* Insert a TYPE into the used types hash table of CFUN. */
5256 static void
5257 used_types_insert_helper (tree type, struct function *func)
5259 if (type != NULL && func != NULL)
5261 void **slot;
5263 if (func->used_types_hash == NULL)
5264 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5265 htab_eq_pointer, NULL);
5266 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5267 if (*slot == NULL)
5268 *slot = type;
5272 /* Given a type, insert it into the used types hash table in cfun.  */
5273 void
5274 used_types_insert (tree t)
5276 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5277 t = TREE_TYPE (t);
5278 t = TYPE_MAIN_VARIANT (t);
5279 if (debug_info_level > DINFO_LEVEL_NONE)
5280 used_types_insert_helper (t, cfun);
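/* For example (illustrative only): when the type of a declaration such
   as

     const int **a[4];

   is passed in, the array and pointer layers are stripped off and the
   main variant of the element type, plain 'int', is what ends up in
   cfun's used-types hash table, and only when debug info is being
   emitted.  */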
5283 struct rtl_opt_pass pass_leaf_regs =
5286 RTL_PASS,
5287 NULL, /* name */
5288 NULL, /* gate */
5289 rest_of_handle_check_leaf_regs, /* execute */
5290 NULL, /* sub */
5291 NULL, /* next */
5292 0, /* static_pass_number */
5293 0, /* tv_id */
5294 0, /* properties_required */
5295 0, /* properties_provided */
5296 0, /* properties_destroyed */
5297 0, /* todo_flags_start */
5298 0 /* todo_flags_finish */
5302 static unsigned int
5303 rest_of_handle_thread_prologue_and_epilogue (void)
5305 if (optimize)
5306 cleanup_cfg (CLEANUP_EXPENSIVE);
5307 /* On some machines, the prologue and epilogue code, or parts thereof,
5308 can be represented as RTL. Doing so lets us schedule insns between
5309 it and the rest of the code and also allows delayed branch
5310 scheduling to operate in the epilogue. */
5312 thread_prologue_and_epilogue_insns ();
5313 return 0;
5316 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5319 RTL_PASS,
5320 "pro_and_epilogue", /* name */
5321 NULL, /* gate */
5322 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5323 NULL, /* sub */
5324 NULL, /* next */
5325 0, /* static_pass_number */
5326 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5327 0, /* properties_required */
5328 0, /* properties_provided */
5329 0, /* properties_destroyed */
5330 TODO_verify_flow, /* todo_flags_start */
5331 TODO_dump_func |
5332 TODO_df_verify |
5333 TODO_df_finish | TODO_verify_rtl_sharing |
5334 TODO_ggc_collect /* todo_flags_finish */
5339 /* This mini-pass fixes fall-out from SSA in asm statements that have
5340 in-out constraints. Say you start with
5342 orig = inout;
5343 asm ("": "+mr" (inout));
5344 use (orig);
5346 which is transformed very early to use explicit output and match operands:
5348 orig = inout;
5349 asm ("": "=mr" (inout) : "0" (inout));
5350 use (orig);
5352 Or, after SSA and copyprop,
5354 asm ("": "=mr" (inout_2) : "0" (inout_1));
5355 use (inout_1);
5357 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5358 they represent two separate values, so they will get different pseudo
5359 registers during expansion. Then, since the two operands need to match
5360 per the constraints, but use different pseudo registers, reload can
5361 only register a reload for these operands. But reloads can only be
5362 satisfied by hardregs, not by memory, so we need a register for this
5363 reload, just because we are presented with non-matching operands.
5364 So, even though we allow memory for this operand, no memory can be
5365 used for it, just because the two operands don't match. This can
5366 cause reload failures on register-starved targets.
5368 So it's a symptom of reload not being able to use memory for reloads
5369 or, alternatively it's also a symptom of both operands not coming into
5370 reload as matching (in which case the pseudo could go to memory just
5371 fine, as the alternative allows it, and no reload would be necessary).
5372 We fix the latter problem here, by transforming
5374 asm ("": "=mr" (inout_2) : "0" (inout_1));
5376 back to
5378 inout_2 = inout_1;
5379 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
5381 static void
5382 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5384 int i;
5385 bool changed = false;
5386 rtx op = SET_SRC (p_sets[0]);
5387 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5388 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5389 bool *output_matched = XALLOCAVEC (bool, noutputs);
5391 memset (output_matched, 0, noutputs * sizeof (bool));
5392 for (i = 0; i < ninputs; i++)
5394 rtx input, output, insns;
5395 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5396 char *end;
5397 int match, j;
5399 match = strtoul (constraint, &end, 10);
5400 if (end == constraint)
5401 continue;
5403 gcc_assert (match < noutputs);
5404 output = SET_DEST (p_sets[match]);
5405 input = RTVEC_ELT (inputs, i);
5406 /* Only do the transformation for pseudos. */
5407 if (! REG_P (output)
5408 || rtx_equal_p (output, input)
5409 || (GET_MODE (input) != VOIDmode
5410 && GET_MODE (input) != GET_MODE (output)))
5411 continue;
5413 /* We can't do anything if the output is also used as input,
5414 as we're going to overwrite it. */
5415 for (j = 0; j < ninputs; j++)
5416 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5417 break;
5418 if (j != ninputs)
5419 continue;
5421 /* Avoid changing the same input several times. For
5422 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5423 only change 'in' once (to out1), rather than changing it
5424 first to out1 and afterwards to out2. */
5425 if (i > 0)
5427 for (j = 0; j < noutputs; j++)
5428 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5429 break;
5430 if (j != noutputs)
5431 continue;
5433 output_matched[match] = true;
5435 start_sequence ();
5436 emit_move_insn (output, input);
5437 insns = get_insns ();
5438 end_sequence ();
5439 emit_insn_before (insns, insn);
5441 /* Now replace all mentions of the input with output. We can't
5442 just replace the occurrence in inputs[i], as the register might
5443 also be used in some other input (or even in an address of an
5444 output), which would mean possibly increasing the number of
5445 inputs by one (namely 'output' in addition), which might pose
5446 too complicated a problem for reload to solve.  E.g. this situation:
5448 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5450 Here 'input' is used in two occurrences as input (once for the
5451 input operand, once for the address in the second output operand).
5452 If we would replace only the occurrence of the input operand (to
5453 make the matching) we would be left with this:
5455 output = input
5456 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5458 Now we suddenly have two different input values (containing the same
5459 value, but different pseudos) where we formerly had only one.
5460 With more complicated asms this might lead to reload failures
5461 which wouldn't have happened without this pass.  So, iterate over
5462 all operands and replace all occurrences of the register used. */
5463 for (j = 0; j < noutputs; j++)
5464 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5465 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5466 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5467 input, output);
5468 for (j = 0; j < ninputs; j++)
5469 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5470 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5471 input, output);
5473 changed = true;
5476 if (changed)
5477 df_insn_rescan (insn);
5480 static unsigned
5481 rest_of_match_asm_constraints (void)
5483 basic_block bb;
5484 rtx insn, pat, *p_sets;
5485 int noutputs;
5487 if (!crtl->has_asm_statement)
5488 return 0;
5490 df_set_flags (DF_DEFER_INSN_RESCAN);
5491 FOR_EACH_BB (bb)
5493 FOR_BB_INSNS (bb, insn)
5495 if (!INSN_P (insn))
5496 continue;
5498 pat = PATTERN (insn);
5499 if (GET_CODE (pat) == PARALLEL)
5500 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5501 else if (GET_CODE (pat) == SET)
5502 p_sets = &PATTERN (insn), noutputs = 1;
5503 else
5504 continue;
5506 if (GET_CODE (*p_sets) == SET
5507 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5508 match_asm_constraints_1 (insn, p_sets, noutputs);
5512 return TODO_df_finish;
5515 struct rtl_opt_pass pass_match_asm_constraints =
5518 RTL_PASS,
5519 "asmcons", /* name */
5520 NULL, /* gate */
5521 rest_of_match_asm_constraints, /* execute */
5522 NULL, /* sub */
5523 NULL, /* next */
5524 0, /* static_pass_number */
5525 0, /* tv_id */
5526 0, /* properties_required */
5527 0, /* properties_provided */
5528 0, /* properties_destroyed */
5529 0, /* todo_flags_start */
5530 TODO_dump_func /* todo_flags_finish */
5535 #include "gt-function.h"