[official-gcc.git] / gcc / function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
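/* A rough sketch, not from the original sources, of how a front end
   typically drives this file for one function; the exact call sites
   live in the language front ends and in the expansion pass:

       expand_function_start (fndecl);
       ... generate RTL for the function body ...
       expand_function_end ();

   assign_stack_local is called along the way whenever a local
   variable or spilled pseudo needs a stack slot.  */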
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "tree-gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
69 /* So we can assign to cfun in this file. */
70 #undef cfun
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
74 #endif
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
86 /* Round a value down to the largest multiple of the required alignment
87 that does not exceed it. Avoid using division in case the value is
88 negative. Assume the alignment is a power of two. */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
91 /* Similar, but round to the next highest integer that meets the
92 alignment. */
93 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
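/* Illustrative values, not part of the original source: with the
   power-of-two alignment 16,
     FLOOR_ROUND (37, 16) == 32,  FLOOR_ROUND (-5, 16) == -16,
     CEIL_ROUND (37, 16)  == 48,  CEIL_ROUND (32, 16)  == 32.  */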
95 /* Nonzero if function being compiled doesn't contain any calls
96 (ignoring the prologue and epilogue). This is set prior to
97 local register allocation and is valid for the remaining
98 compiler passes. */
99 int current_function_is_leaf;
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102 (ignoring the prologue and epilogue). This is only valid after
103 pass_stack_ptr_mod has run. */
104 int current_function_sp_is_unchanging;
106 /* Nonzero if the function being compiled is a leaf function which only
107 uses leaf registers. This is valid after reload (specifically after
108 sched2) and is useful only if the port defines LEAF_REGISTERS. */
109 int current_function_uses_only_leaf_regs;
111 /* Nonzero once virtual register instantiation has been done.
112 assign_stack_local uses frame_pointer_rtx when this is nonzero.
113 calls.c:emit_library_call_value_1 uses it to set up
114 post-instantiation libcalls. */
115 int virtuals_instantiated;
117 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
118 static GTY(()) int funcdef_no;
120 /* These variables hold pointers to functions to create and destroy
121 target specific, per-function data structures. */
122 struct machine_function * (*init_machine_status) (void);
124 /* The currently compiled function. */
125 struct function *cfun = 0;
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
131 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
132 in this function. */
133 static VEC(int,heap) *sibcall_epilogue;
135 /* In order to evaluate some expressions, such as function calls returning
136 structures in memory, we need to temporarily allocate stack locations.
137 We record each allocated temporary in the following structure.
139 Associated with each temporary slot is a nesting level. When we pop up
140 one level, all temporaries associated with the previous level are freed.
141 Normally, all temporaries are freed after the execution of the statement
142 in which they were created. However, if we are inside a ({...}) grouping,
143 the result may be in a temporary and hence must be preserved. If the
144 result could be in a temporary, we preserve it if we can determine which
145 one it is in. If we cannot determine which temporary may contain the
146 result, all temporaries are preserved. A temporary is preserved by
147 pretending it was allocated at the previous nesting level.
149 Automatic variables are also assigned temporary slots, at the nesting
150 level where they are defined. They are marked as "kept" so that
151 free_temp_slots will not free them. */
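/* A minimal usage sketch, not from the original source, of the
   temporary-slot interface defined below; real callers are spread
   across the RTL expanders:

       push_temp_slots ();
       temp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
       ... emit RTL that stores into and reads from TEMP ...
       free_temp_slots ();
       pop_temp_slots ();
*/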
153 struct temp_slot GTY(())
155 /* Points to next temporary slot. */
156 struct temp_slot *next;
157 /* Points to previous temporary slot. */
158 struct temp_slot *prev;
160 /* The rtx used to reference the slot. */
161 rtx slot;
162 /* The rtx used to represent the address if not the address of the
163 slot above. May be an EXPR_LIST if multiple addresses exist. */
164 rtx address;
165 /* The alignment (in bits) of the slot. */
166 unsigned int align;
167 /* The size, in units, of the slot. */
168 HOST_WIDE_INT size;
169 /* The type of the object in the slot, or zero if it doesn't correspond
170 to a type. We use this to determine whether a slot can be reused.
171 It can be reused if objects of the type of the new slot will always
172 conflict with objects of the type of the old slot. */
173 tree type;
174 /* Nonzero if this temporary is currently in use. */
175 char in_use;
176 /* Nonzero if this temporary has its address taken. */
177 char addr_taken;
178 /* Nesting level at which this slot is being used. */
179 int level;
180 /* Nonzero if this should survive a call to free_temp_slots. */
181 int keep;
182 /* The offset of the slot from the frame_pointer, including extra space
183 for alignment. This info is for combine_temp_slots. */
184 HOST_WIDE_INT base_offset;
185 /* The size of the slot, including extra space for alignment. This
186 info is for combine_temp_slots. */
187 HOST_WIDE_INT full_size;
190 /* Forward declarations. */
192 static struct temp_slot *find_temp_slot_from_address (rtx);
193 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
194 static void pad_below (struct args_size *, enum machine_mode, tree);
195 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
196 static int all_blocks (tree, tree *);
197 static tree *get_block_vector (tree, int *);
198 extern tree debug_find_var_in_block_tree (tree, tree);
199 /* We always define `record_insns' even if it's not used so that we
200 can always export `prologue_epilogue_contains'. */
201 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
202 static int contains (const_rtx, VEC(int,heap) **);
203 #ifdef HAVE_return
204 static void emit_return_into_block (basic_block);
205 #endif
206 static void prepare_function_start (void);
207 static void do_clobber_return_reg (rtx, void *);
208 static void do_use_return_reg (rtx, void *);
209 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
211 /* Pointer to chain of `struct function' for containing functions. */
212 struct function *outer_function_chain;
214 /* Given a function decl for a containing function,
215 return the `struct function' for it. */
217 struct function *
218 find_function_data (tree decl)
220 struct function *p;
222 for (p = outer_function_chain; p; p = p->outer)
223 if (p->decl == decl)
224 return p;
226 gcc_unreachable ();
229 /* Save the current context for compilation of a nested function.
230 This is called from language-specific code. */
232 void
233 push_function_context (void)
235 if (cfun == 0)
236 allocate_struct_function (NULL, false);
238 cfun->outer = outer_function_chain;
239 outer_function_chain = cfun;
240 set_cfun (NULL);
243 /* Restore the last saved context, at the end of a nested function.
244 This function is called from language-specific code. */
246 void
247 pop_function_context (void)
249 struct function *p = outer_function_chain;
251 set_cfun (p);
252 outer_function_chain = p->outer;
253 current_function_decl = p->decl;
255 /* Reset variables that have known state during rtx generation. */
256 virtuals_instantiated = 0;
257 generating_concat_p = 1;
260 /* Clear out all parts of the state in F that can safely be discarded
261 after the function has been parsed, but not compiled, to let
262 garbage collection reclaim the memory. */
264 void
265 free_after_parsing (struct function *f)
267 f->language = 0;
270 /* Clear out all parts of the state in F that can safely be discarded
271 after the function has been compiled, to let garbage collection
272 reclaim the memory. */
274 void
275 free_after_compilation (struct function *f)
277 VEC_free (int, heap, prologue);
278 VEC_free (int, heap, epilogue);
279 VEC_free (int, heap, sibcall_epilogue);
280 if (crtl->emit.regno_pointer_align)
281 free (crtl->emit.regno_pointer_align);
283 memset (crtl, 0, sizeof (struct rtl_data));
284 f->eh = NULL;
285 f->machine = NULL;
286 f->cfg = NULL;
288 regno_reg_rtx = NULL;
291 /* Return size needed for stack frame based on slots so far allocated.
292 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
293 the caller may have to do that. */
295 HOST_WIDE_INT
296 get_frame_size (void)
298 if (FRAME_GROWS_DOWNWARD)
299 return -frame_offset;
300 else
301 return frame_offset;
304 /* Issue an error message and return TRUE if frame OFFSET overflows in
305 the signed target pointer arithmetic for function FUNC. Otherwise
306 return FALSE. */
308 bool
309 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
311 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
313 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
314 /* Leave room for the fixed part of the frame. */
315 - 64 * UNITS_PER_WORD)
317 error ("%Jtotal size of local objects too large", func);
318 return TRUE;
321 return FALSE;
324 /* Return stack slot alignment in bits for TYPE and MODE. */
326 static unsigned int
327 get_stack_local_alignment (tree type, enum machine_mode mode)
329 unsigned int alignment;
331 if (mode == BLKmode)
332 alignment = BIGGEST_ALIGNMENT;
333 else
334 alignment = GET_MODE_ALIGNMENT (mode);
336 /* Allow the front-end to (possibly) increase the alignment of this
337 stack slot. */
338 if (! type)
339 type = lang_hooks.types.type_for_mode (mode, 0);
341 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
344 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
345 with machine mode MODE.
347 ALIGN controls the amount of alignment for the address of the slot:
348 0 means according to MODE,
349 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
350 -2 means use BITS_PER_UNIT,
351 positive specifies alignment boundary in bits.
353 We do not round to stack_boundary here. */
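/* Illustrative calls, not from the original source:
     assign_stack_local (SImode, 4, 0)     -- slot aligned as SImode requires;
     assign_stack_local (BLKmode, n, -1)   -- N rounded up to BIGGEST_ALIGNMENT;
     assign_stack_local (BLKmode, n, 128)  -- slot address aligned to 128 bits.  */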
355 rtx
356 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
358 rtx x, addr;
359 int bigend_correction = 0;
360 unsigned int alignment, alignment_in_bits;
361 int frame_off, frame_alignment, frame_phase;
363 if (align == 0)
365 alignment = get_stack_local_alignment (NULL, mode);
366 alignment /= BITS_PER_UNIT;
368 else if (align == -1)
370 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
371 size = CEIL_ROUND (size, alignment);
373 else if (align == -2)
374 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
375 else
376 alignment = align / BITS_PER_UNIT;
378 if (FRAME_GROWS_DOWNWARD)
379 frame_offset -= size;
381 /* Ignore any alignment request that exceeds the expected alignment of the stack boundary. */
382 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
383 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
385 alignment_in_bits = alignment * BITS_PER_UNIT;
387 if (crtl->stack_alignment_needed < alignment_in_bits)
388 crtl->stack_alignment_needed = alignment_in_bits;
390 /* Calculate how many bytes the start of local variables is off from
391 stack alignment. */
392 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
393 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
394 frame_phase = frame_off ? frame_alignment - frame_off : 0;
396 /* Round the frame offset to the specified alignment. The default is
397 to always honor requests to align the stack but a port may choose to
398 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
399 if (STACK_ALIGNMENT_NEEDED
400 || mode != BLKmode
401 || size != 0)
403 /* We must be careful here, since FRAME_OFFSET might be negative and
404 division with a negative dividend isn't as well defined as we might
405 like. So we instead assume that ALIGNMENT is a power of two and
406 use logical operations which are unambiguous. */
407 if (FRAME_GROWS_DOWNWARD)
408 frame_offset
409 = (FLOOR_ROUND (frame_offset - frame_phase,
410 (unsigned HOST_WIDE_INT) alignment)
411 + frame_phase);
412 else
413 frame_offset
414 = (CEIL_ROUND (frame_offset - frame_phase,
415 (unsigned HOST_WIDE_INT) alignment)
416 + frame_phase);
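/* Worked example, not in the original source: with FRAME_GROWS_DOWNWARD,
   frame_phase 0 and an 8-byte alignment, a frame_offset of -13 is
   floored to -16, so the new slot starts on an 8-byte boundary.  */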
419 /* On a big-endian machine, if we are allocating more space than we will use,
420 use the least significant bytes of those that are allocated. */
421 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
422 bigend_correction = size - GET_MODE_SIZE (mode);
424 /* If we have already instantiated virtual registers, return the actual
425 address relative to the frame pointer. */
426 if (virtuals_instantiated)
427 addr = plus_constant (frame_pointer_rtx,
428 trunc_int_for_mode
429 (frame_offset + bigend_correction
430 + STARTING_FRAME_OFFSET, Pmode));
431 else
432 addr = plus_constant (virtual_stack_vars_rtx,
433 trunc_int_for_mode
434 (frame_offset + bigend_correction,
435 Pmode));
437 if (!FRAME_GROWS_DOWNWARD)
438 frame_offset += size;
440 x = gen_rtx_MEM (mode, addr);
441 set_mem_align (x, alignment_in_bits);
442 MEM_NOTRAP_P (x) = 1;
444 stack_slot_list
445 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
447 if (frame_offset_overflow (frame_offset, current_function_decl))
448 frame_offset = 0;
450 return x;
453 /* Removes temporary slot TEMP from LIST. */
455 static void
456 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
458 if (temp->next)
459 temp->next->prev = temp->prev;
460 if (temp->prev)
461 temp->prev->next = temp->next;
462 else
463 *list = temp->next;
465 temp->prev = temp->next = NULL;
468 /* Inserts temporary slot TEMP to LIST. */
470 static void
471 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
473 temp->next = *list;
474 if (*list)
475 (*list)->prev = temp;
476 temp->prev = NULL;
477 *list = temp;
480 /* Returns the list of used temp slots at LEVEL. */
482 static struct temp_slot **
483 temp_slots_at_level (int level)
485 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
486 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
488 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
491 /* Returns the maximal temporary slot level. */
493 static int
494 max_slot_level (void)
496 if (!used_temp_slots)
497 return -1;
499 return VEC_length (temp_slot_p, used_temp_slots) - 1;
502 /* Moves temporary slot TEMP to LEVEL. */
504 static void
505 move_slot_to_level (struct temp_slot *temp, int level)
507 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
508 insert_slot_to_list (temp, temp_slots_at_level (level));
509 temp->level = level;
512 /* Make temporary slot TEMP available. */
514 static void
515 make_slot_available (struct temp_slot *temp)
517 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
518 insert_slot_to_list (temp, &avail_temp_slots);
519 temp->in_use = 0;
520 temp->level = -1;
523 /* Allocate a temporary stack slot and record it for possible later
524 reuse.
526 MODE is the machine mode to be given to the returned rtx.
528 SIZE is the size in units of the space required. We do no rounding here
529 since assign_stack_local will do any required rounding.
531 KEEP is 1 if this slot is to be retained after a call to
532 free_temp_slots. Automatic variables for a block are allocated
533 with this flag. KEEP values of 2 or 3 were needed respectively
534 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
535 or for SAVE_EXPRs, but they are now unused.
537 TYPE is the type that will be used for the stack slot. */
539 rtx
540 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
541 int keep, tree type)
543 unsigned int align;
544 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
545 rtx slot;
547 /* If SIZE is -1 it means that somebody tried to allocate a temporary
548 of a variable size. */
549 gcc_assert (size != -1);
551 /* These are now unused. */
552 gcc_assert (keep <= 1);
554 align = get_stack_local_alignment (type, mode);
556 /* Try to find an available, already-allocated temporary of the proper
557 mode which meets the size and alignment requirements. Choose the
558 smallest one with the closest alignment.
560 If assign_stack_temp is called outside of the tree->rtl expansion,
561 we cannot reuse the stack slots (that may still refer to
562 VIRTUAL_STACK_VARS_REGNUM). */
563 if (!virtuals_instantiated)
565 for (p = avail_temp_slots; p; p = p->next)
567 if (p->align >= align && p->size >= size
568 && GET_MODE (p->slot) == mode
569 && objects_must_conflict_p (p->type, type)
570 && (best_p == 0 || best_p->size > p->size
571 || (best_p->size == p->size && best_p->align > p->align)))
573 if (p->align == align && p->size == size)
575 selected = p;
576 cut_slot_from_list (selected, &avail_temp_slots);
577 best_p = 0;
578 break;
580 best_p = p;
585 /* Make our best, if any, the one to use. */
586 if (best_p)
588 selected = best_p;
589 cut_slot_from_list (selected, &avail_temp_slots);
591 /* If there are enough aligned bytes left over, make them into a new
592 temp_slot so that the extra bytes don't get wasted. Do this only
593 for BLKmode slots, so that we can be sure of the alignment. */
594 if (GET_MODE (best_p->slot) == BLKmode)
596 int alignment = best_p->align / BITS_PER_UNIT;
597 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
599 if (best_p->size - rounded_size >= alignment)
601 p = ggc_alloc (sizeof (struct temp_slot));
602 p->in_use = p->addr_taken = 0;
603 p->size = best_p->size - rounded_size;
604 p->base_offset = best_p->base_offset + rounded_size;
605 p->full_size = best_p->full_size - rounded_size;
606 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
607 p->align = best_p->align;
608 p->address = 0;
609 p->type = best_p->type;
610 insert_slot_to_list (p, &avail_temp_slots);
612 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
613 stack_slot_list);
615 best_p->size = rounded_size;
616 best_p->full_size = rounded_size;
621 /* If we still didn't find one, make a new temporary. */
622 if (selected == 0)
624 HOST_WIDE_INT frame_offset_old = frame_offset;
626 p = ggc_alloc (sizeof (struct temp_slot));
628 /* We are passing an explicit alignment request to assign_stack_local.
629 One side effect of that is assign_stack_local will not round SIZE
630 to ensure the frame offset remains suitably aligned.
632 So for requests which depended on the rounding of SIZE, we go ahead
633 and round it now. We also make sure ALIGNMENT is at least
634 BIGGEST_ALIGNMENT. */
635 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
636 p->slot = assign_stack_local (mode,
637 (mode == BLKmode
638 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
639 : size),
640 align);
642 p->align = align;
644 /* The following slot size computation is necessary because we don't
645 know the actual size of the temporary slot until assign_stack_local
646 has performed all the frame alignment and size rounding for the
647 requested temporary. Note that extra space added for alignment
648 can be either above or below this stack slot depending on which
649 way the frame grows. We include the extra space if and only if it
650 is above this slot. */
651 if (FRAME_GROWS_DOWNWARD)
652 p->size = frame_offset_old - frame_offset;
653 else
654 p->size = size;
656 /* Now define the fields used by combine_temp_slots. */
657 if (FRAME_GROWS_DOWNWARD)
659 p->base_offset = frame_offset;
660 p->full_size = frame_offset_old - frame_offset;
662 else
664 p->base_offset = frame_offset_old;
665 p->full_size = frame_offset - frame_offset_old;
667 p->address = 0;
669 selected = p;
672 p = selected;
673 p->in_use = 1;
674 p->addr_taken = 0;
675 p->type = type;
676 p->level = temp_slot_level;
677 p->keep = keep;
679 pp = temp_slots_at_level (p->level);
680 insert_slot_to_list (p, pp);
682 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
683 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
684 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
686 /* If we know the alias set for the memory that will be used, use
687 it. If there's no TYPE, then we don't know anything about the
688 alias set for the memory. */
689 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
690 set_mem_align (slot, align);
692 /* If a type is specified, set the relevant flags. */
693 if (type != 0)
695 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
696 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
697 || TREE_CODE (type) == COMPLEX_TYPE));
699 MEM_NOTRAP_P (slot) = 1;
701 return slot;
704 /* Allocate a temporary stack slot and record it for possible later
705 reuse. The first three arguments are the same as in the preceding function. */
707 rtx
708 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
710 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
713 /* Assign a temporary.
714 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
715 and so it should be used in error messages. In either case, we
716 allocate a temporary of the given type.
717 KEEP is as for assign_stack_temp.
718 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
719 it is 0 if a register is OK.
720 DONT_PROMOTE is 1 if we should not promote values in register
721 to wider modes. */
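/* Illustrative uses, not from the original source:
     assign_temp (type, 0, 1, 0)  -- TYPE is forced into stack memory and
                                     a MEM rtx is returned;
     assign_temp (type, 0, 0, 0)  -- a small scalar TYPE may instead come
                                     back as a pseudo REG.  */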
723 rtx
724 assign_temp (tree type_or_decl, int keep, int memory_required,
725 int dont_promote ATTRIBUTE_UNUSED)
727 tree type, decl;
728 enum machine_mode mode;
729 #ifdef PROMOTE_MODE
730 int unsignedp;
731 #endif
733 if (DECL_P (type_or_decl))
734 decl = type_or_decl, type = TREE_TYPE (decl);
735 else
736 decl = NULL, type = type_or_decl;
738 mode = TYPE_MODE (type);
739 #ifdef PROMOTE_MODE
740 unsignedp = TYPE_UNSIGNED (type);
741 #endif
743 if (mode == BLKmode || memory_required)
745 HOST_WIDE_INT size = int_size_in_bytes (type);
746 rtx tmp;
748 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
749 problems with allocating the stack space. */
750 if (size == 0)
751 size = 1;
753 /* Unfortunately, we don't yet know how to allocate variable-sized
754 temporaries. However, sometimes we can find a fixed upper limit on
755 the size, so try that instead. */
756 else if (size == -1)
757 size = max_int_size_in_bytes (type);
759 /* The size of the temporary may be too large to fit into an integer. */
760 /* ??? Not sure this should happen except for user silliness, so limit
761 this to things that aren't compiler-generated temporaries. The
762 rest of the time we'll die in assign_stack_temp_for_type. */
763 if (decl && size == -1
764 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
766 error ("size of variable %q+D is too large", decl);
767 size = 1;
770 tmp = assign_stack_temp_for_type (mode, size, keep, type);
771 return tmp;
774 #ifdef PROMOTE_MODE
775 if (! dont_promote)
776 mode = promote_mode (type, mode, &unsignedp, 0);
777 #endif
779 return gen_reg_rtx (mode);
782 /* Combine temporary stack slots which are adjacent on the stack.
784 This allows for better use of already allocated stack space. This is only
785 done for BLKmode slots because we can be sure that we won't have alignment
786 problems in this case. */
788 static void
789 combine_temp_slots (void)
791 struct temp_slot *p, *q, *next, *next_q;
792 int num_slots;
794 /* We can't combine slots, because the information about which slot
795 is in which alias set will be lost. */
796 if (flag_strict_aliasing)
797 return;
799 /* If there are a lot of temp slots, don't do anything unless
800 high levels of optimization are enabled. */
801 if (! flag_expensive_optimizations)
802 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
803 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
804 return;
806 for (p = avail_temp_slots; p; p = next)
808 int delete_p = 0;
810 next = p->next;
812 if (GET_MODE (p->slot) != BLKmode)
813 continue;
815 for (q = p->next; q; q = next_q)
817 int delete_q = 0;
819 next_q = q->next;
821 if (GET_MODE (q->slot) != BLKmode)
822 continue;
824 if (p->base_offset + p->full_size == q->base_offset)
826 /* Q comes after P; combine Q into P. */
827 p->size += q->size;
828 p->full_size += q->full_size;
829 delete_q = 1;
831 else if (q->base_offset + q->full_size == p->base_offset)
833 /* P comes after Q; combine P into Q. */
834 q->size += p->size;
835 q->full_size += p->full_size;
836 delete_p = 1;
837 break;
839 if (delete_q)
840 cut_slot_from_list (q, &avail_temp_slots);
843 /* Either delete P or advance past it. */
844 if (delete_p)
845 cut_slot_from_list (p, &avail_temp_slots);
849 /* Find the temp slot corresponding to the object at address X. */
851 static struct temp_slot *
852 find_temp_slot_from_address (rtx x)
854 struct temp_slot *p;
855 rtx next;
856 int i;
858 for (i = max_slot_level (); i >= 0; i--)
859 for (p = *temp_slots_at_level (i); p; p = p->next)
861 if (XEXP (p->slot, 0) == x
862 || p->address == x
863 || (GET_CODE (x) == PLUS
864 && XEXP (x, 0) == virtual_stack_vars_rtx
865 && GET_CODE (XEXP (x, 1)) == CONST_INT
866 && INTVAL (XEXP (x, 1)) >= p->base_offset
867 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
868 return p;
870 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
871 for (next = p->address; next; next = XEXP (next, 1))
872 if (XEXP (next, 0) == x)
873 return p;
876 /* If we have a sum involving a register, see if it points to a temp
877 slot. */
878 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
879 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
880 return p;
881 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
882 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
883 return p;
885 return 0;
888 /* Indicate that NEW is an alternate way of referring to the temp slot
889 that previously was known by OLD. */
891 void
892 update_temp_slot_address (rtx old, rtx new)
894 struct temp_slot *p;
896 if (rtx_equal_p (old, new))
897 return;
899 p = find_temp_slot_from_address (old);
901 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
902 is a register, see if one operand of the PLUS is a temporary
903 location. If so, NEW points into it. Otherwise, if both OLD and
904 NEW are a PLUS, see if there is a register in common between them;
905 if so, try a recursive call on those values. */
906 if (p == 0)
908 if (GET_CODE (old) != PLUS)
909 return;
911 if (REG_P (new))
913 update_temp_slot_address (XEXP (old, 0), new);
914 update_temp_slot_address (XEXP (old, 1), new);
915 return;
917 else if (GET_CODE (new) != PLUS)
918 return;
920 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
921 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
922 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
923 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
924 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
925 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
926 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
927 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
929 return;
932 /* Otherwise add an alias for the temp's address. */
933 else if (p->address == 0)
934 p->address = new;
935 else
937 if (GET_CODE (p->address) != EXPR_LIST)
938 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
940 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
944 /* If X could be a reference to a temporary slot, mark the fact that its
945 address was taken. */
947 void
948 mark_temp_addr_taken (rtx x)
950 struct temp_slot *p;
952 if (x == 0)
953 return;
955 /* If X is not in memory or is at a constant address, it cannot be in
956 a temporary slot. */
957 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
958 return;
960 p = find_temp_slot_from_address (XEXP (x, 0));
961 if (p != 0)
962 p->addr_taken = 1;
965 /* If X could be a reference to a temporary slot, mark that slot as
966 belonging to the level one higher than the current level. If X
967 matched one of our slots, just mark that one. Otherwise, we can't
968 easily predict which it is, so upgrade all of them. Kept slots
969 need not be touched.
971 This is called when an ({...}) construct occurs and a statement
972 returns a value in memory. */
974 void
975 preserve_temp_slots (rtx x)
977 struct temp_slot *p = 0, *next;
979 /* If there is no result, we still might have some objects whose addresses
980 were taken, so we need to make sure they stay around. */
981 if (x == 0)
983 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
985 next = p->next;
987 if (p->addr_taken)
988 move_slot_to_level (p, temp_slot_level - 1);
991 return;
994 /* If X is a register that is being used as a pointer, see if we have
995 a temporary slot we know it points to. To be consistent with
996 the code below, we really should preserve all non-kept slots
997 if we can't find a match, but that seems to be much too costly. */
998 if (REG_P (x) && REG_POINTER (x))
999 p = find_temp_slot_from_address (x);
1001 /* If X is not in memory or is at a constant address, it cannot be in
1002 a temporary slot, but it can contain something whose address was
1003 taken. */
1004 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1006 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1008 next = p->next;
1010 if (p->addr_taken)
1011 move_slot_to_level (p, temp_slot_level - 1);
1014 return;
1017 /* First see if we can find a match. */
1018 if (p == 0)
1019 p = find_temp_slot_from_address (XEXP (x, 0));
1021 if (p != 0)
1023 /* Move everything at our level whose address was taken to our new
1024 level in case we used its address. */
1025 struct temp_slot *q;
1027 if (p->level == temp_slot_level)
1029 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1031 next = q->next;
1033 if (p != q && q->addr_taken)
1034 move_slot_to_level (q, temp_slot_level - 1);
1037 move_slot_to_level (p, temp_slot_level - 1);
1038 p->addr_taken = 0;
1040 return;
1043 /* Otherwise, preserve all non-kept slots at this level. */
1044 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1046 next = p->next;
1048 if (!p->keep)
1049 move_slot_to_level (p, temp_slot_level - 1);
1053 /* Free all temporaries used so far. This is normally called at the
1054 end of generating code for a statement. */
1056 void
1057 free_temp_slots (void)
1059 struct temp_slot *p, *next;
1061 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1063 next = p->next;
1065 if (!p->keep)
1066 make_slot_available (p);
1069 combine_temp_slots ();
1072 /* Push deeper into the nesting level for stack temporaries. */
1074 void
1075 push_temp_slots (void)
1077 temp_slot_level++;
1080 /* Pop a temporary nesting level. All slots in use in the current level
1081 are freed. */
1083 void
1084 pop_temp_slots (void)
1086 struct temp_slot *p, *next;
1088 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1090 next = p->next;
1091 make_slot_available (p);
1094 combine_temp_slots ();
1096 temp_slot_level--;
1099 /* Initialize temporary slots. */
1101 void
1102 init_temp_slots (void)
1104 /* We have not allocated any temporaries yet. */
1105 avail_temp_slots = 0;
1106 used_temp_slots = 0;
1107 temp_slot_level = 0;
1110 /* These routines are responsible for converting virtual register references
1111 to the actual hard register references once RTL generation is complete.
1113 The following variables are used for communication between the
1114 routines. They contain the offsets of the virtual registers from their
1115 respective hard registers. */
1117 static int in_arg_offset;
1118 static int var_offset;
1119 static int dynamic_offset;
1120 static int out_arg_offset;
1121 static int cfa_offset;
1123 /* On most machines, the stack pointer register is equivalent to the bottom
1124 of the stack. */
1126 #ifndef STACK_POINTER_OFFSET
1127 #define STACK_POINTER_OFFSET 0
1128 #endif
1130 /* If not defined, pick an appropriate default for the offset of dynamically
1131 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1132 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1134 #ifndef STACK_DYNAMIC_OFFSET
1136 /* The bottom of the stack points to the actual arguments. If
1137 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1138 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1139 stack space for register parameters is not pushed by the caller, but
1140 rather part of the fixed stack areas and hence not included in
1141 `crtl->outgoing_args_size'. Nevertheless, we must allow
1142 for it when allocating stack dynamic objects. */
1144 #if defined(REG_PARM_STACK_SPACE)
1145 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1146 ((ACCUMULATE_OUTGOING_ARGS \
1147 ? (crtl->outgoing_args_size \
1148 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1149 : REG_PARM_STACK_SPACE (FNDECL))) \
1150 : 0) + (STACK_POINTER_OFFSET))
1151 #else
1152 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1153 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1154 + (STACK_POINTER_OFFSET))
1155 #endif
1156 #endif
1159 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1160 is a virtual register, return the equivalent hard register and set the
1161 offset indirectly through the pointer. Otherwise, return 0. */
1163 static rtx
1164 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1166 rtx new;
1167 HOST_WIDE_INT offset;
1169 if (x == virtual_incoming_args_rtx)
1170 new = arg_pointer_rtx, offset = in_arg_offset;
1171 else if (x == virtual_stack_vars_rtx)
1172 new = frame_pointer_rtx, offset = var_offset;
1173 else if (x == virtual_stack_dynamic_rtx)
1174 new = stack_pointer_rtx, offset = dynamic_offset;
1175 else if (x == virtual_outgoing_args_rtx)
1176 new = stack_pointer_rtx, offset = out_arg_offset;
1177 else if (x == virtual_cfa_rtx)
1179 #ifdef FRAME_POINTER_CFA_OFFSET
1180 new = frame_pointer_rtx;
1181 #else
1182 new = arg_pointer_rtx;
1183 #endif
1184 offset = cfa_offset;
1186 else
1187 return NULL_RTX;
1189 *poffset = offset;
1190 return new;
1193 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1194 Instantiate any virtual registers present inside of *LOC. The expression
1195 is simplified, as much as possible, but is not to be considered "valid"
1196 in any sense implied by the target. If any change is made, set CHANGED
1197 to true. */
1199 static int
1200 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1202 HOST_WIDE_INT offset;
1203 bool *changed = (bool *) data;
1204 rtx x, new;
1206 x = *loc;
1207 if (x == 0)
1208 return 0;
1210 switch (GET_CODE (x))
1212 case REG:
1213 new = instantiate_new_reg (x, &offset);
1214 if (new)
1216 *loc = plus_constant (new, offset);
1217 if (changed)
1218 *changed = true;
1220 return -1;
1222 case PLUS:
1223 new = instantiate_new_reg (XEXP (x, 0), &offset);
1224 if (new)
1226 new = plus_constant (new, offset);
1227 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1228 if (changed)
1229 *changed = true;
1230 return -1;
1233 /* FIXME -- from old code */
1234 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1235 we can commute the PLUS and SUBREG because pointers into the
1236 frame are well-behaved. */
1237 break;
1239 default:
1240 break;
1243 return 0;
1246 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1247 matches the predicate for insn CODE operand OPERAND. */
1249 static int
1250 safe_insn_predicate (int code, int operand, rtx x)
1252 const struct insn_operand_data *op_data;
1254 if (code < 0)
1255 return true;
1257 op_data = &insn_data[code].operand[operand];
1258 if (op_data->predicate == NULL)
1259 return true;
1261 return op_data->predicate (x, op_data->mode);
1264 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1265 registers present inside of insn. The result will be a valid insn. */
1267 static void
1268 instantiate_virtual_regs_in_insn (rtx insn)
1270 HOST_WIDE_INT offset;
1271 int insn_code, i;
1272 bool any_change = false;
1273 rtx set, new, x, seq;
1275 /* There are some special cases to be handled first. */
1276 set = single_set (insn);
1277 if (set)
1279 /* We're allowed to assign to a virtual register. This is interpreted
1280 to mean that the underlying register gets assigned the inverse
1281 transformation. This is used, for example, in the handling of
1282 non-local gotos. */
1283 new = instantiate_new_reg (SET_DEST (set), &offset);
1284 if (new)
1286 start_sequence ();
1288 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1289 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1290 GEN_INT (-offset));
1291 x = force_operand (x, new);
1292 if (x != new)
1293 emit_move_insn (new, x);
1295 seq = get_insns ();
1296 end_sequence ();
1298 emit_insn_before (seq, insn);
1299 delete_insn (insn);
1300 return;
1303 /* Handle a straight copy from a virtual register by generating a
1304 new add insn. The difference between this and falling through
1305 to the generic case is avoiding a new pseudo and eliminating a
1306 move insn in the initial rtl stream. */
1307 new = instantiate_new_reg (SET_SRC (set), &offset);
1308 if (new && offset != 0
1309 && REG_P (SET_DEST (set))
1310 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1312 start_sequence ();
1314 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1315 new, GEN_INT (offset), SET_DEST (set),
1316 1, OPTAB_LIB_WIDEN);
1317 if (x != SET_DEST (set))
1318 emit_move_insn (SET_DEST (set), x);
1320 seq = get_insns ();
1321 end_sequence ();
1323 emit_insn_before (seq, insn);
1324 delete_insn (insn);
1325 return;
1328 extract_insn (insn);
1329 insn_code = INSN_CODE (insn);
1331 /* Handle a plus involving a virtual register by determining if the
1332 operands remain valid if they're modified in place. */
1333 if (GET_CODE (SET_SRC (set)) == PLUS
1334 && recog_data.n_operands >= 3
1335 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1336 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1337 && GET_CODE (recog_data.operand[2]) == CONST_INT
1338 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1340 offset += INTVAL (recog_data.operand[2]);
1342 /* If the sum is zero, then replace with a plain move. */
1343 if (offset == 0
1344 && REG_P (SET_DEST (set))
1345 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1347 start_sequence ();
1348 emit_move_insn (SET_DEST (set), new);
1349 seq = get_insns ();
1350 end_sequence ();
1352 emit_insn_before (seq, insn);
1353 delete_insn (insn);
1354 return;
1357 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1359 /* Using validate_change and apply_change_group here leaves
1360 recog_data in an invalid state. Since we know exactly what
1361 we want to check, do those two by hand. */
1362 if (safe_insn_predicate (insn_code, 1, new)
1363 && safe_insn_predicate (insn_code, 2, x))
1365 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1366 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1367 any_change = true;
1369 /* Fall through into the regular operand fixup loop in
1370 order to take care of operands other than 1 and 2. */
1374 else
1376 extract_insn (insn);
1377 insn_code = INSN_CODE (insn);
1380 /* In the general case, we expect virtual registers to appear only in
1381 operands, and then only as either bare registers or inside memories. */
1382 for (i = 0; i < recog_data.n_operands; ++i)
1384 x = recog_data.operand[i];
1385 switch (GET_CODE (x))
1387 case MEM:
1389 rtx addr = XEXP (x, 0);
1390 bool changed = false;
1392 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1393 if (!changed)
1394 continue;
1396 start_sequence ();
1397 x = replace_equiv_address (x, addr);
1398 /* It may happen that the address with the virtual reg
1399 was valid (e.g. based on the virtual stack reg, which might
1400 be acceptable to the predicates with all offsets), whereas
1401 the address now isn't valid anymore, for instance when the address
1402 still has an offset applied, but the base reg isn't virtual-stack-reg
1403 anymore. Below we would do a force_reg on the whole operand,
1404 but this insn might actually only accept memory. Hence,
1405 before doing that last resort, try to reload the address into
1406 a register, so this operand stays a MEM. */
1407 if (!safe_insn_predicate (insn_code, i, x))
1409 addr = force_reg (GET_MODE (addr), addr);
1410 x = replace_equiv_address (x, addr);
1412 seq = get_insns ();
1413 end_sequence ();
1414 if (seq)
1415 emit_insn_before (seq, insn);
1417 break;
1419 case REG:
1420 new = instantiate_new_reg (x, &offset);
1421 if (new == NULL)
1422 continue;
1423 if (offset == 0)
1424 x = new;
1425 else
1427 start_sequence ();
1429 /* Careful, special mode predicates may have stuff in
1430 insn_data[insn_code].operand[i].mode that isn't useful
1431 to us for computing a new value. */
1432 /* ??? Recognize address_operand and/or "p" constraints
1433 to see if (plus new offset) is valid before we put
1434 this through expand_simple_binop. */
1435 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1436 GEN_INT (offset), NULL_RTX,
1437 1, OPTAB_LIB_WIDEN);
1438 seq = get_insns ();
1439 end_sequence ();
1440 emit_insn_before (seq, insn);
1442 break;
1444 case SUBREG:
1445 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1446 if (new == NULL)
1447 continue;
1448 if (offset != 0)
1450 start_sequence ();
1451 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1452 GEN_INT (offset), NULL_RTX,
1453 1, OPTAB_LIB_WIDEN);
1454 seq = get_insns ();
1455 end_sequence ();
1456 emit_insn_before (seq, insn);
1458 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1459 GET_MODE (new), SUBREG_BYTE (x));
1460 break;
1462 default:
1463 continue;
1466 /* At this point, X contains the new value for the operand.
1467 Validate the new value vs the insn predicate. Note that
1468 asm insns will have insn_code -1 here. */
1469 if (!safe_insn_predicate (insn_code, i, x))
1471 start_sequence ();
1472 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1473 seq = get_insns ();
1474 end_sequence ();
1475 if (seq)
1476 emit_insn_before (seq, insn);
1479 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1480 any_change = true;
1483 if (any_change)
1485 /* Propagate operand changes into the duplicates. */
1486 for (i = 0; i < recog_data.n_dups; ++i)
1487 *recog_data.dup_loc[i]
1488 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1490 /* Force re-recognition of the instruction for validation. */
1491 INSN_CODE (insn) = -1;
1494 if (asm_noperands (PATTERN (insn)) >= 0)
1496 if (!check_asm_operands (PATTERN (insn)))
1498 error_for_asm (insn, "impossible constraint in %<asm%>");
1499 delete_insn (insn);
1502 else
1504 if (recog_memoized (insn) < 0)
1505 fatal_insn_not_found (insn);
1509 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1510 do any instantiation required. */
1512 void
1513 instantiate_decl_rtl (rtx x)
1515 rtx addr;
1517 if (x == 0)
1518 return;
1520 /* If this is a CONCAT, recurse for the pieces. */
1521 if (GET_CODE (x) == CONCAT)
1523 instantiate_decl_rtl (XEXP (x, 0));
1524 instantiate_decl_rtl (XEXP (x, 1));
1525 return;
1528 /* If this is not a MEM, no need to do anything. Similarly if the
1529 address is a constant or a register that is not a virtual register. */
1530 if (!MEM_P (x))
1531 return;
1533 addr = XEXP (x, 0);
1534 if (CONSTANT_P (addr)
1535 || (REG_P (addr)
1536 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1537 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1538 return;
1540 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1543 /* Helper for instantiate_decls called via walk_tree: Process all decls
1544 in the given DECL_VALUE_EXPR. */
1546 static tree
1547 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1549 tree t = *tp;
1550 if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
1552 *walk_subtrees = 0;
1553 if (DECL_P (t) && DECL_RTL_SET_P (t))
1554 instantiate_decl_rtl (DECL_RTL (t));
1556 return NULL;
1559 /* Subroutine of instantiate_decls: Process all decls in the given
1560 BLOCK node and all its subblocks. */
1562 static void
1563 instantiate_decls_1 (tree let)
1565 tree t;
1567 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1569 if (DECL_RTL_SET_P (t))
1570 instantiate_decl_rtl (DECL_RTL (t));
1571 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1573 tree v = DECL_VALUE_EXPR (t);
1574 walk_tree (&v, instantiate_expr, NULL, NULL);
1578 /* Process all subblocks. */
1579 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1580 instantiate_decls_1 (t);
1583 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1584 all virtual registers in their DECL_RTL's. */
1586 static void
1587 instantiate_decls (tree fndecl)
1589 tree decl;
1591 /* Process all parameters of the function. */
1592 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1594 instantiate_decl_rtl (DECL_RTL (decl));
1595 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1596 if (DECL_HAS_VALUE_EXPR_P (decl))
1598 tree v = DECL_VALUE_EXPR (decl);
1599 walk_tree (&v, instantiate_expr, NULL, NULL);
1603 /* Now process all variables defined in the function or its subblocks. */
1604 instantiate_decls_1 (DECL_INITIAL (fndecl));
1607 /* Pass through the INSNS of function FNDECL and convert virtual register
1608 references to hard register references. */
1610 static unsigned int
1611 instantiate_virtual_regs (void)
1613 rtx insn;
1615 /* Compute the offsets to use for this function. */
1616 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1617 var_offset = STARTING_FRAME_OFFSET;
1618 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1619 out_arg_offset = STACK_POINTER_OFFSET;
1620 #ifdef FRAME_POINTER_CFA_OFFSET
1621 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1622 #else
1623 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1624 #endif
1626 /* Initialize recognition, indicating that volatile is OK. */
1627 init_recog ();
1629 /* Scan through all the insns, instantiating every virtual register still
1630 present. */
1631 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1632 if (INSN_P (insn))
1634 /* These patterns in the instruction stream can never be recognized.
1635 Fortunately, they shouldn't contain virtual registers either. */
1636 if (GET_CODE (PATTERN (insn)) == USE
1637 || GET_CODE (PATTERN (insn)) == CLOBBER
1638 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1639 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1640 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1641 continue;
1643 instantiate_virtual_regs_in_insn (insn);
1645 if (INSN_DELETED_P (insn))
1646 continue;
1648 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1650 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1651 if (GET_CODE (insn) == CALL_INSN)
1652 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1653 instantiate_virtual_regs_in_rtx, NULL);
1656 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1657 instantiate_decls (current_function_decl);
1659 targetm.instantiate_decls ();
1661 /* Indicate that, from now on, assign_stack_local should use
1662 frame_pointer_rtx. */
1663 virtuals_instantiated = 1;
1664 return 0;
1667 struct rtl_opt_pass pass_instantiate_virtual_regs =
1670 RTL_PASS,
1671 "vregs", /* name */
1672 NULL, /* gate */
1673 instantiate_virtual_regs, /* execute */
1674 NULL, /* sub */
1675 NULL, /* next */
1676 0, /* static_pass_number */
1677 0, /* tv_id */
1678 0, /* properties_required */
1679 0, /* properties_provided */
1680 0, /* properties_destroyed */
1681 0, /* todo_flags_start */
1682 TODO_dump_func /* todo_flags_finish */
1687 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1688 This means a type for which function calls must pass an address to the
1689 function or get an address back from the function.
1690 EXP may be a type node or an expression (whose type is tested). */
1692 int
1693 aggregate_value_p (const_tree exp, const_tree fntype)
1695 int i, regno, nregs;
1696 rtx reg;
1698 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1700 /* DECL node associated with FNTYPE when relevant, which we might need to
1701 check for by-invisible-reference returns, typically for CALL_EXPR input
1702 EXPressions. */
1703 const_tree fndecl = NULL_TREE;
1705 if (fntype)
1706 switch (TREE_CODE (fntype))
1708 case CALL_EXPR:
1709 fndecl = get_callee_fndecl (fntype);
1710 fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1711 break;
1712 case FUNCTION_DECL:
1713 fndecl = fntype;
1714 fntype = TREE_TYPE (fndecl);
1715 break;
1716 case FUNCTION_TYPE:
1717 case METHOD_TYPE:
1718 break;
1719 case IDENTIFIER_NODE:
1720 fntype = 0;
1721 break;
1722 default:
1723 /* We don't expect other tree codes here. */
1724 gcc_unreachable ();
1727 if (TREE_CODE (type) == VOID_TYPE)
1728 return 0;
1730 /* If the front end has decided that this needs to be passed by
1731 reference, do so. */
1732 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1733 && DECL_BY_REFERENCE (exp))
1734 return 1;
1736 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1737 called function RESULT_DECL, meaning the function returns in memory by
1738 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1739 on the function type, which used to be the way to request such a return
1740 mechanism but might now be causing troubles at gimplification time if
1741 temporaries with the function type need to be created. */
1742 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1743 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1744 return 1;
1746 if (targetm.calls.return_in_memory (type, fntype))
1747 return 1;
1748 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1749 and thus can't be returned in registers. */
1750 if (TREE_ADDRESSABLE (type))
1751 return 1;
1752 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1753 return 1;
1754 /* Make sure we have suitable call-clobbered regs to return
1755 the value in; if not, we must return it in memory. */
1756 reg = hard_function_value (type, 0, fntype, 0);
1758 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1759 it is OK. */
1760 if (!REG_P (reg))
1761 return 0;
1763 regno = REGNO (reg);
1764 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1765 for (i = 0; i < nregs; i++)
1766 if (! call_used_regs[regno + i])
1767 return 1;
1768 return 0;
1771 /* Return true if we should assign DECL a pseudo register; false if it
1772 should live on the local stack. */
1774 bool
1775 use_register_for_decl (const_tree decl)
1777 if (!targetm.calls.allocate_stack_slots_for_args())
1778 return true;
1780 /* Honor volatile. */
1781 if (TREE_SIDE_EFFECTS (decl))
1782 return false;
1784 /* Honor addressability. */
1785 if (TREE_ADDRESSABLE (decl))
1786 return false;
1788 /* Only register-like things go in registers. */
1789 if (DECL_MODE (decl) == BLKmode)
1790 return false;
1792 /* If -ffloat-store specified, don't put explicit float variables
1793 into registers. */
1794 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1795 propagates values across these stores, and it probably shouldn't. */
1796 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1797 return false;
1799 /* If we're not interested in tracking debugging information for
1800 this decl, then we can certainly put it in a register. */
1801 if (DECL_IGNORED_P (decl))
1802 return true;
1804 return (optimize || DECL_REGISTER (decl));
1807 /* Return true if TYPE should be passed by invisible reference. */
1809 bool
1810 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1811 tree type, bool named_arg)
1813 if (type)
1815 /* If this type contains non-trivial constructors, then it is
1816 forbidden for the middle-end to create any new copies. */
1817 if (TREE_ADDRESSABLE (type))
1818 return true;
1820 /* GCC post 3.4 passes *all* variable sized types by reference. */
1821 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1822 return true;
1825 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1828 /* Return true if TYPE, which is passed by reference, should be callee
1829 copied instead of caller copied. */
1831 bool
1832 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1833 tree type, bool named_arg)
1835 if (type && TREE_ADDRESSABLE (type))
1836 return false;
1837 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1840 /* Structures to communicate between the subroutines of assign_parms.
1841 The first holds data persistent across all parameters, the second
1842 is cleared out for each parameter. */
1844 struct assign_parm_data_all
1846 CUMULATIVE_ARGS args_so_far;
1847 struct args_size stack_args_size;
1848 tree function_result_decl;
1849 tree orig_fnargs;
1850 rtx first_conversion_insn;
1851 rtx last_conversion_insn;
1852 HOST_WIDE_INT pretend_args_size;
1853 HOST_WIDE_INT extra_pretend_bytes;
1854 int reg_parm_stack_space;
1857 struct assign_parm_data_one
1859 tree nominal_type;
1860 tree passed_type;
1861 rtx entry_parm;
1862 rtx stack_parm;
1863 enum machine_mode nominal_mode;
1864 enum machine_mode passed_mode;
1865 enum machine_mode promoted_mode;
1866 struct locate_and_pad_arg_data locate;
1867 int partial;
1868 BOOL_BITFIELD named_arg : 1;
1869 BOOL_BITFIELD passed_pointer : 1;
1870 BOOL_BITFIELD on_stack : 1;
1871 BOOL_BITFIELD loaded_in_reg : 1;
1874 /* A subroutine of assign_parms. Initialize ALL. */
1876 static void
1877 assign_parms_initialize_all (struct assign_parm_data_all *all)
1879 tree fntype;
1881 memset (all, 0, sizeof (*all));
1883 fntype = TREE_TYPE (current_function_decl);
1885 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1886 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1887 #else
1888 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1889 current_function_decl, -1);
1890 #endif
1892 #ifdef REG_PARM_STACK_SPACE
1893 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1894 #endif
1897 /* If ARGS contains entries with complex types, split the entry into two
1898 entries of the component type. Return a new list if substitutions are
1899 needed, else the old list. */
1901 static tree
1902 split_complex_args (tree args)
1904 tree p;
1906 /* Before allocating memory, check for the common case of no complex. */
1907 for (p = args; p; p = TREE_CHAIN (p))
1909 tree type = TREE_TYPE (p);
1910 if (TREE_CODE (type) == COMPLEX_TYPE
1911 && targetm.calls.split_complex_arg (type))
1912 goto found;
1914 return args;
1916 found:
1917 args = copy_list (args);
1919 for (p = args; p; p = TREE_CHAIN (p))
1921 tree type = TREE_TYPE (p);
1922 if (TREE_CODE (type) == COMPLEX_TYPE
1923 && targetm.calls.split_complex_arg (type))
1925 tree decl;
1926 tree subtype = TREE_TYPE (type);
1927 bool addressable = TREE_ADDRESSABLE (p);
1929 /* Rewrite the PARM_DECL's type with its component. */
1930 TREE_TYPE (p) = subtype;
1931 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1932 DECL_MODE (p) = VOIDmode;
1933 DECL_SIZE (p) = NULL;
1934 DECL_SIZE_UNIT (p) = NULL;
1935 /* If this arg must go in memory, put it in a pseudo here.
1936 We can't allow it to go in memory as per normal parms,
1937 because the usual place might not have the imag part
1938 adjacent to the real part. */
1939 DECL_ARTIFICIAL (p) = addressable;
1940 DECL_IGNORED_P (p) = addressable;
1941 TREE_ADDRESSABLE (p) = 0;
1942 layout_decl (p, 0);
1944 /* Build a second synthetic decl. */
1945 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1946 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1947 DECL_ARTIFICIAL (decl) = addressable;
1948 DECL_IGNORED_P (decl) = addressable;
1949 layout_decl (decl, 0);
1951 /* Splice it in; skip the new decl. */
1952 TREE_CHAIN (decl) = TREE_CHAIN (p);
1953 TREE_CHAIN (p) = decl;
1954 p = decl;
1958 return args;
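/* An illustrative sketch, not part of GCC: the copy-then-splice pattern
   used by split_complex_args above, shown on a hypothetical singly
   linked list of ints instead of a PARM_DECL chain.  Each "splittable"
   node is rewritten in place, a second synthetic node is spliced in
   directly after it, and the loop then skips the new node, just as the
   code above skips the freshly built decl.  Kept under "#if 0"; builds
   as plain C when extracted.  */
#if 0
#include <stdlib.h>

struct node { int value; struct node *chain; };

static void
sketch_split_even (struct node *list)
{
  struct node *p;
  for (p = list; p; p = p->chain)
    if (p->value % 2 == 0)
      {
        /* Rewrite the existing node with one "component"...  */
        struct node *extra = malloc (sizeof *extra);
        p->value /= 2;
        /* ...build a second synthetic node for the other component...  */
        extra->value = p->value;
        /* ...splice it in and skip it, like the TREE_CHAIN splicing above.  */
        extra->chain = p->chain;
        p->chain = extra;
        p = extra;
      }
}
#endif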
1961 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
1962 the hidden struct return argument, and (abi willing) complex args.
1963 Return the new parameter list. */
1965 static tree
1966 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
1968 tree fndecl = current_function_decl;
1969 tree fntype = TREE_TYPE (fndecl);
1970 tree fnargs = DECL_ARGUMENTS (fndecl);
1972 /* If struct value address is treated as the first argument, make it so. */
1973 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
1974 && ! cfun->returns_pcc_struct
1975 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
1977 tree type = build_pointer_type (TREE_TYPE (fntype));
1978 tree decl;
1980 decl = build_decl (PARM_DECL, NULL_TREE, type);
1981 DECL_ARG_TYPE (decl) = type;
1982 DECL_ARTIFICIAL (decl) = 1;
1983 DECL_IGNORED_P (decl) = 1;
1985 TREE_CHAIN (decl) = fnargs;
1986 fnargs = decl;
1987 all->function_result_decl = decl;
1990 all->orig_fnargs = fnargs;
1992 /* If the target wants to split complex arguments into scalars, do so. */
1993 if (targetm.calls.split_complex_arg)
1994 fnargs = split_complex_args (fnargs);
1996 return fnargs;
1999 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2000 data for the parameter. Incorporate ABI specifics such as pass-by-
2001 reference and type promotion. */
2003 static void
2004 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2005 struct assign_parm_data_one *data)
2007 tree nominal_type, passed_type;
2008 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2010 memset (data, 0, sizeof (*data));
2012 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2013 if (!cfun->stdarg)
2014 data->named_arg = 1; /* No variadic parms. */
2015 else if (TREE_CHAIN (parm))
2016 data->named_arg = 1; /* Not the last non-variadic parm. */
2017 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2018 data->named_arg = 1; /* Only variadic ones are unnamed. */
2019 else
2020 data->named_arg = 0; /* Treat as variadic. */
2022 nominal_type = TREE_TYPE (parm);
2023 passed_type = DECL_ARG_TYPE (parm);
2025 /* Look out for errors propagating this far. Also, if the parameter's
2026 type is void then its value doesn't matter. */
2027 if (TREE_TYPE (parm) == error_mark_node
2028 /* This can happen after weird syntax errors
2029 or if an enum type is defined among the parms. */
2030 || TREE_CODE (parm) != PARM_DECL
2031 || passed_type == NULL
2032 || VOID_TYPE_P (nominal_type))
2034 nominal_type = passed_type = void_type_node;
2035 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2036 goto egress;
2039 /* Find mode of arg as it is passed, and mode of arg as it should be
2040 during execution of this function. */
2041 passed_mode = TYPE_MODE (passed_type);
2042 nominal_mode = TYPE_MODE (nominal_type);
2044 /* If the parm is to be passed as a transparent union, use the type of
2045 the first field for the tests below. We have already verified that
2046 the modes are the same. */
2047 if (TREE_CODE (passed_type) == UNION_TYPE
2048 && TYPE_TRANSPARENT_UNION (passed_type))
2049 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2051 /* See if this arg was passed by invisible reference. */
2052 if (pass_by_reference (&all->args_so_far, passed_mode,
2053 passed_type, data->named_arg))
2055 passed_type = nominal_type = build_pointer_type (passed_type);
2056 data->passed_pointer = true;
2057 passed_mode = nominal_mode = Pmode;
2060 /* Find mode as it is passed by the ABI. */
2061 promoted_mode = passed_mode;
2062 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2064 int unsignedp = TYPE_UNSIGNED (passed_type);
2065 promoted_mode = promote_mode (passed_type, promoted_mode,
2066 &unsignedp, 1);
2069 egress:
2070 data->nominal_type = nominal_type;
2071 data->passed_type = passed_type;
2072 data->nominal_mode = nominal_mode;
2073 data->passed_mode = passed_mode;
2074 data->promoted_mode = promoted_mode;
2077 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2079 static void
2080 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2081 struct assign_parm_data_one *data, bool no_rtl)
2083 int varargs_pretend_bytes = 0;
2085 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2086 data->promoted_mode,
2087 data->passed_type,
2088 &varargs_pretend_bytes, no_rtl);
2090 /* If the back-end has requested extra stack space, record how much is
2091 needed. Do not change pretend_args_size otherwise since it may be
2092 nonzero from an earlier partial argument. */
2093 if (varargs_pretend_bytes > 0)
2094 all->pretend_args_size = varargs_pretend_bytes;
2097 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2098 the incoming location of the current parameter. */
2100 static void
2101 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2102 struct assign_parm_data_one *data)
2104 HOST_WIDE_INT pretend_bytes = 0;
2105 rtx entry_parm;
2106 bool in_regs;
2108 if (data->promoted_mode == VOIDmode)
2110 data->entry_parm = data->stack_parm = const0_rtx;
2111 return;
2114 #ifdef FUNCTION_INCOMING_ARG
2115 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2116 data->passed_type, data->named_arg);
2117 #else
2118 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2119 data->passed_type, data->named_arg);
2120 #endif
2122 if (entry_parm == 0)
2123 data->promoted_mode = data->passed_mode;
2125 /* Determine parm's home in the stack, in case it arrives in the stack
2126 or we should pretend it did. Compute the stack position and rtx where
2127 the argument arrives and its size.
2129 There is one complexity here: If this was a parameter that would
2130 have been passed in registers, but wasn't, only because it is
2131 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2132 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2133 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2134 as it was the previous time. */
2135 in_regs = entry_parm != 0;
2136 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2137 in_regs = true;
2138 #endif
2139 if (!in_regs && !data->named_arg)
2141 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2143 rtx tem;
2144 #ifdef FUNCTION_INCOMING_ARG
2145 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2146 data->passed_type, true);
2147 #else
2148 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2149 data->passed_type, true);
2150 #endif
2151 in_regs = tem != NULL;
2155 /* If this parameter was passed both in registers and in the stack, use
2156 the copy on the stack. */
2157 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2158 data->passed_type))
2159 entry_parm = 0;
2161 if (entry_parm)
2163 int partial;
2165 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2166 data->promoted_mode,
2167 data->passed_type,
2168 data->named_arg);
2169 data->partial = partial;
2171 /* The caller might already have allocated stack space for the
2172 register parameters. */
2173 if (partial != 0 && all->reg_parm_stack_space == 0)
2175 /* Part of this argument is passed in registers and part
2176 is passed on the stack. Ask the prologue code to extend
2177 the stack part so that we can recreate the full value.
2179 PRETEND_BYTES is the size of the registers we need to store.
2180 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2181 stack space that the prologue should allocate.
2183 Internally, gcc assumes that the argument pointer is aligned
2184 to STACK_BOUNDARY bits. This is used both for alignment
2185 optimizations (see init_emit) and to locate arguments that are
2186 aligned to more than PARM_BOUNDARY bits. We must preserve this
2187 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2188 a stack boundary. */
2190 /* We assume at most one partial arg, and it must be the first
2191 argument on the stack. */
2192 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2194 pretend_bytes = partial;
2195 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2197 /* We want to align relative to the actual stack pointer, so
2198 don't include this in the stack size until later. */
2199 all->extra_pretend_bytes = all->pretend_args_size;
2203 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2204 entry_parm ? data->partial : 0, current_function_decl,
2205 &all->stack_args_size, &data->locate);
2207 /* Adjust offsets to include the pretend args. */
2208 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2209 data->locate.slot_offset.constant += pretend_bytes;
2210 data->locate.offset.constant += pretend_bytes;
2212 data->entry_parm = entry_parm;
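/* An illustrative arithmetic sketch, not part of GCC, of the
   pretend-args rounding performed above.  The numbers are hypothetical:
   with STACK_BYTES == 16 and 12 bytes of a partial argument left in
   registers, the prologue is asked for a full 16-byte slot so the
   argument-pointer alignment invariant is preserved.  Kept under
   "#if 0"; builds as plain C when extracted.  */
#if 0
#include <stdio.h>

#define SKETCH_CEIL_ROUND(VALUE, ALIGN) \
  (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

int
main (void)
{
  int partial_bytes = 12;   /* bytes of the arg passed in registers */
  int stack_bytes = 16;     /* hypothetical STACK_BOUNDARY / BITS_PER_UNIT */
  int pretend = SKETCH_CEIL_ROUND (partial_bytes, stack_bytes);

  printf ("pretend_args_size = %d\n", pretend);   /* prints 16 */
  return 0;
}
#endif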
2215 /* A subroutine of assign_parms. If there is actually space on the stack
2216 for this parm, count it in stack_args_size and return true. */
2218 static bool
2219 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2220 struct assign_parm_data_one *data)
2222 /* Trivially true if we've no incoming register. */
2223 if (data->entry_parm == NULL)
2225 /* Also true if we're partially in registers and partially not,
2226 since we've arranged to drop the entire argument on the stack. */
2227 else if (data->partial != 0)
2229 /* Also true if the target says that it's passed in both registers
2230 and on the stack. */
2231 else if (GET_CODE (data->entry_parm) == PARALLEL
2232 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2234 /* Also true if the target says that there's stack allocated for
2235 all register parameters. */
2236 else if (all->reg_parm_stack_space > 0)
2239 /* Otherwise, no, this parameter has no ABI-defined stack slot. */
2239 else
2240 return false;
2242 all->stack_args_size.constant += data->locate.size.constant;
2243 if (data->locate.size.var)
2244 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2246 return true;
2249 /* A subroutine of assign_parms. Given that this parameter is allocated
2250 stack space by the ABI, find it. */
2252 static void
2253 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2255 rtx offset_rtx, stack_parm;
2256 unsigned int align, boundary;
2258 /* If we're passing this arg using a reg, make its stack home the
2259 aligned stack slot. */
2260 if (data->entry_parm)
2261 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2262 else
2263 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2265 stack_parm = crtl->args.internal_arg_pointer;
2266 if (offset_rtx != const0_rtx)
2267 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2268 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2270 set_mem_attributes (stack_parm, parm, 1);
2272 boundary = data->locate.boundary;
2273 align = BITS_PER_UNIT;
2275 /* If we're padding upward, we know that the alignment of the slot
2276 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2277 intentionally forcing upward padding. Otherwise we have to come
2278 up with a guess at the alignment based on OFFSET_RTX. */
2279 if (data->locate.where_pad != downward || data->entry_parm)
2280 align = boundary;
2281 else if (GET_CODE (offset_rtx) == CONST_INT)
2283 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2284 align = align & -align;
2286 set_mem_align (stack_parm, align);
2288 if (data->entry_parm)
2289 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2291 data->stack_parm = stack_parm;
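/* An illustrative sketch, not part of GCC, of the alignment guess made
   above for a downward-padded stack slot: the provable alignment of
   (arg pointer + offset) is the lowest set bit of
   (offset in bits | boundary).  The numbers below are hypothetical.
   Kept under "#if 0"; builds as plain C when extracted.  */
#if 0
#include <stdio.h>

static unsigned int
sketch_slot_align (long offset_bytes, unsigned int boundary_bits)
{
  unsigned int align = (unsigned int) (offset_bytes * 8) | boundary_bits;
  return align & -align;   /* lowest set bit == guaranteed alignment */
}

int
main (void)
{
  /* Offset 12 bytes (96 bits) against a 64-bit boundary: only 32-bit
     alignment can be guaranteed for the slot.  */
  printf ("%u\n", sketch_slot_align (12, 64));   /* prints 32 */
  return 0;
}
#endif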
2294 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2295 always valid and contiguous. */
2297 static void
2298 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2300 rtx entry_parm = data->entry_parm;
2301 rtx stack_parm = data->stack_parm;
2303 /* If this parm was passed part in regs and part in memory, pretend it
2304 arrived entirely in memory by pushing the register-part onto the stack.
2305 In the special case of a DImode or DFmode that is split, we could put
2306 it together in a pseudoreg directly, but for now that's not worth
2307 bothering with. */
2308 if (data->partial != 0)
2310 /* Handle calls that pass values in multiple non-contiguous
2311 locations. The Irix 6 ABI has examples of this. */
2312 if (GET_CODE (entry_parm) == PARALLEL)
2313 emit_group_store (validize_mem (stack_parm), entry_parm,
2314 data->passed_type,
2315 int_size_in_bytes (data->passed_type));
2316 else
2318 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2319 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2320 data->partial / UNITS_PER_WORD);
2323 entry_parm = stack_parm;
2326 /* If we didn't decide this parm came in a register, by default it came
2327 on the stack. */
2328 else if (entry_parm == NULL)
2329 entry_parm = stack_parm;
2331 /* When an argument is passed in multiple locations, we can't make use
2332 of this information, but we can save some copying if the whole argument
2333 is passed in a single register. */
2334 else if (GET_CODE (entry_parm) == PARALLEL
2335 && data->nominal_mode != BLKmode
2336 && data->passed_mode != BLKmode)
2338 size_t i, len = XVECLEN (entry_parm, 0);
2340 for (i = 0; i < len; i++)
2341 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2342 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2343 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2344 == data->passed_mode)
2345 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2347 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2348 break;
2352 data->entry_parm = entry_parm;
2355 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2356 always valid and properly aligned. */
2358 static void
2359 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2361 rtx stack_parm = data->stack_parm;
2363 /* If we can't trust the parm stack slot to be aligned enough for its
2364 ultimate type, don't use that slot after entry. We'll make another
2365 stack slot, if we need one. */
2366 if (stack_parm
2367 && ((STRICT_ALIGNMENT
2368 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2369 || (data->nominal_type
2370 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2371 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2372 stack_parm = NULL;
2374 /* If parm was passed in memory, and we need to convert it on entry,
2375 don't store it back in that same slot. */
2376 else if (data->entry_parm == stack_parm
2377 && data->nominal_mode != BLKmode
2378 && data->nominal_mode != data->passed_mode)
2379 stack_parm = NULL;
2381 /* If stack protection is in effect for this function, don't leave any
2382 pointers in their passed stack slots. */
2383 else if (crtl->stack_protect_guard
2384 && (flag_stack_protect == 2
2385 || data->passed_pointer
2386 || POINTER_TYPE_P (data->nominal_type)))
2387 stack_parm = NULL;
2389 data->stack_parm = stack_parm;
2392 /* A subroutine of assign_parms. Return true if the current parameter
2393 should be stored as a BLKmode in the current frame. */
2395 static bool
2396 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2398 if (data->nominal_mode == BLKmode)
2399 return true;
2400 if (GET_CODE (data->entry_parm) == PARALLEL)
2401 return true;
2403 #ifdef BLOCK_REG_PADDING
2404 /* Only assign_parm_setup_block knows how to deal with register arguments
2405 that are padded at the least significant end. */
2406 if (REG_P (data->entry_parm)
2407 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2408 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2409 == (BYTES_BIG_ENDIAN ? upward : downward)))
2410 return true;
2411 #endif
2413 return false;
2416 /* A subroutine of assign_parms. Arrange for the parameter to be
2417 present and valid in DATA->STACK_RTL. */
2419 static void
2420 assign_parm_setup_block (struct assign_parm_data_all *all,
2421 tree parm, struct assign_parm_data_one *data)
2423 rtx entry_parm = data->entry_parm;
2424 rtx stack_parm = data->stack_parm;
2425 HOST_WIDE_INT size;
2426 HOST_WIDE_INT size_stored;
2427 rtx orig_entry_parm = entry_parm;
2429 if (GET_CODE (entry_parm) == PARALLEL)
2430 entry_parm = emit_group_move_into_temps (entry_parm);
2432 /* If we've a non-block object that's nevertheless passed in parts,
2433 reconstitute it in register operations rather than on the stack. */
2434 if (GET_CODE (entry_parm) == PARALLEL
2435 && data->nominal_mode != BLKmode)
2437 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2439 if ((XVECLEN (entry_parm, 0) > 1
2440 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2441 && use_register_for_decl (parm))
2443 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2445 push_to_sequence2 (all->first_conversion_insn,
2446 all->last_conversion_insn);
2448 /* For values returned in multiple registers, handle possible
2449 incompatible calls to emit_group_store.
2451 For example, the following would be invalid, and would have to
2452 be fixed by the conditional below:
2454 emit_group_store ((reg:SF), (parallel:DF))
2455 emit_group_store ((reg:SI), (parallel:DI))
2457 An example of this are doubles in e500 v2:
2458 (parallel:DF (expr_list (reg:SI) (const_int 0))
2459 (expr_list (reg:SI) (const_int 4))). */
2460 if (data->nominal_mode != data->passed_mode)
2462 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2463 emit_group_store (t, entry_parm, NULL_TREE,
2464 GET_MODE_SIZE (GET_MODE (entry_parm)));
2465 convert_move (parmreg, t, 0);
2467 else
2468 emit_group_store (parmreg, entry_parm, data->nominal_type,
2469 int_size_in_bytes (data->nominal_type));
2471 all->first_conversion_insn = get_insns ();
2472 all->last_conversion_insn = get_last_insn ();
2473 end_sequence ();
2475 SET_DECL_RTL (parm, parmreg);
2476 return;
2480 size = int_size_in_bytes (data->passed_type);
2481 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2482 if (stack_parm == 0)
2484 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2485 stack_parm = assign_stack_local (BLKmode, size_stored,
2486 DECL_ALIGN (parm));
2487 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2488 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2489 set_mem_attributes (stack_parm, parm, 1);
2492 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2493 calls that pass values in multiple non-contiguous locations. */
2494 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2496 rtx mem;
2498 /* Note that we will be storing an integral number of words.
2499 So we have to be careful to ensure that we allocate an
2500 integral number of words. We do this above when we call
2501 assign_stack_local if space was not allocated in the argument
2502 list. If it was, this will not work if PARM_BOUNDARY is not
2503 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2504 if it becomes a problem. The exception is when BLKmode arrives
2505 with arguments not conforming to word_mode. */
2507 if (data->stack_parm == 0)
2509 else if (GET_CODE (entry_parm) == PARALLEL)
2511 else
2512 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2514 mem = validize_mem (stack_parm);
2516 /* Handle values in multiple non-contiguous locations. */
2517 if (GET_CODE (entry_parm) == PARALLEL)
2519 push_to_sequence2 (all->first_conversion_insn,
2520 all->last_conversion_insn);
2521 emit_group_store (mem, entry_parm, data->passed_type, size);
2522 all->first_conversion_insn = get_insns ();
2523 all->last_conversion_insn = get_last_insn ();
2524 end_sequence ();
2527 else if (size == 0)
2530 /* If SIZE is that of a mode no bigger than a word, just use
2531 that mode's store operation. */
2532 else if (size <= UNITS_PER_WORD)
2534 enum machine_mode mode
2535 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2537 if (mode != BLKmode
2538 #ifdef BLOCK_REG_PADDING
2539 && (size == UNITS_PER_WORD
2540 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2541 != (BYTES_BIG_ENDIAN ? upward : downward)))
2542 #endif
2545 rtx reg;
2547 /* We are really truncating a word_mode value containing
2548 SIZE bytes into a value of mode MODE. If such an
2549 operation requires no actual instructions, we can refer
2550 to the value directly in mode MODE, otherwise we must
2551 start with the register in word_mode and explicitly
2552 convert it. */
2553 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2554 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2555 else
2557 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2558 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2560 emit_move_insn (change_address (mem, mode, 0), reg);
2563 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2564 machine must be aligned to the left before storing
2565 to memory. Note that the previous test doesn't
2566 handle all cases (e.g. SIZE == 3). */
2567 else if (size != UNITS_PER_WORD
2568 #ifdef BLOCK_REG_PADDING
2569 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2570 == downward)
2571 #else
2572 && BYTES_BIG_ENDIAN
2573 #endif
2576 rtx tem, x;
2577 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2578 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2580 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2581 build_int_cst (NULL_TREE, by),
2582 NULL_RTX, 1);
2583 tem = change_address (mem, word_mode, 0);
2584 emit_move_insn (tem, x);
2586 else
2587 move_block_from_reg (REGNO (entry_parm), mem,
2588 size_stored / UNITS_PER_WORD);
2590 else
2591 move_block_from_reg (REGNO (entry_parm), mem,
2592 size_stored / UNITS_PER_WORD);
2594 else if (data->stack_parm == 0)
2596 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2597 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2598 BLOCK_OP_NORMAL);
2599 all->first_conversion_insn = get_insns ();
2600 all->last_conversion_insn = get_last_insn ();
2601 end_sequence ();
2604 data->stack_parm = stack_parm;
2605 SET_DECL_RTL (parm, stack_parm);
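/* An illustrative sketch, not part of GCC, of the left-justification
   performed above for a sub-word BLKmode argument on a big-endian
   target.  A hypothetical 3-byte value living in the low end of a
   32-bit register is shifted up by (4 - 3) * 8 bits so that, when the
   whole word is stored, the 3 payload bytes land at the lowest
   addresses of the stack slot.  Kept under "#if 0"; builds as plain C
   when extracted.  */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t reg = 0x00ABCDEFu;    /* 3 significant bytes, i.e. size == 3 */
  unsigned int size = 3, units_per_word = 4, bits_per_unit = 8;
  uint32_t shifted = reg << ((units_per_word - size) * bits_per_unit);

  printf ("0x%08X\n", shifted);  /* prints 0xABCDEF00 */
  return 0;
}
#endif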
2608 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2609 parameter. Get it there. Perform all ABI specified conversions. */
2611 static void
2612 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2613 struct assign_parm_data_one *data)
2615 rtx parmreg;
2616 enum machine_mode promoted_nominal_mode;
2617 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2618 bool did_conversion = false;
2620 /* Store the parm in a pseudoregister during the function, but we may
2621 need to do it in a wider mode. */
2623 /* This is not really promoting for a call. However we need to be
2624 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2625 promoted_nominal_mode
2626 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2628 parmreg = gen_reg_rtx (promoted_nominal_mode);
2630 if (!DECL_ARTIFICIAL (parm))
2631 mark_user_reg (parmreg);
2633 /* If this was an item that we received a pointer to,
2634 set DECL_RTL appropriately. */
2635 if (data->passed_pointer)
2637 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2638 set_mem_attributes (x, parm, 1);
2639 SET_DECL_RTL (parm, x);
2641 else
2642 SET_DECL_RTL (parm, parmreg);
2644 /* Copy the value into the register. */
2645 if (data->nominal_mode != data->passed_mode
2646 || promoted_nominal_mode != data->promoted_mode)
2648 int save_tree_used;
2650 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2651 mode, by the caller. We now have to convert it to
2652 NOMINAL_MODE, if different. However, PARMREG may be in
2653 a different mode than NOMINAL_MODE if it is being stored
2654 promoted.
2656 If ENTRY_PARM is a hard register, it might be in a register
2657 not valid for operating in its mode (e.g., an odd-numbered
2658 register for a DFmode). In that case, moves are the only
2659 thing valid, so we can't do a convert from there. This
2660 occurs when the calling sequence allows such misaligned
2661 usages.
2663 In addition, the conversion may involve a call, which could
2664 clobber parameters which haven't been copied to pseudo
2665 registers yet. Therefore, we must first copy the parm to
2666 a pseudo reg here, and save the conversion until after all
2667 parameters have been moved. */
2669 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2671 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2673 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2674 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2676 if (GET_CODE (tempreg) == SUBREG
2677 && GET_MODE (tempreg) == data->nominal_mode
2678 && REG_P (SUBREG_REG (tempreg))
2679 && data->nominal_mode == data->passed_mode
2680 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2681 && GET_MODE_SIZE (GET_MODE (tempreg))
2682 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2684 /* The argument is already sign/zero extended, so note it
2685 into the subreg. */
2686 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2687 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2690 /* TREE_USED gets set erroneously during expand_assignment. */
2691 save_tree_used = TREE_USED (parm);
2692 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2693 TREE_USED (parm) = save_tree_used;
2694 all->first_conversion_insn = get_insns ();
2695 all->last_conversion_insn = get_last_insn ();
2696 end_sequence ();
2698 did_conversion = true;
2700 else
2701 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2703 /* If we were passed a pointer but the actual value can safely live
2704 in a register, put it in one. */
2705 if (data->passed_pointer
2706 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2707 /* If by-reference argument was promoted, demote it. */
2708 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2709 || use_register_for_decl (parm)))
2711 /* We can't use nominal_mode, because it will have been set to
2712 Pmode above. We must use the actual mode of the parm. */
2713 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2714 mark_user_reg (parmreg);
2716 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2718 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2719 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2721 push_to_sequence2 (all->first_conversion_insn,
2722 all->last_conversion_insn);
2723 emit_move_insn (tempreg, DECL_RTL (parm));
2724 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2725 emit_move_insn (parmreg, tempreg);
2726 all->first_conversion_insn = get_insns ();
2727 all->last_conversion_insn = get_last_insn ();
2728 end_sequence ();
2730 did_conversion = true;
2732 else
2733 emit_move_insn (parmreg, DECL_RTL (parm));
2735 SET_DECL_RTL (parm, parmreg);
2737 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2738 now the parm. */
2739 data->stack_parm = NULL;
2742 /* Mark the register as eliminable if we did no conversion and it was
2743 copied from memory at a fixed offset, and the arg pointer was not
2744 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2745 offset formed an invalid address, such memory-equivalences as we
2746 make here would screw up life analysis for it. */
2747 if (data->nominal_mode == data->passed_mode
2748 && !did_conversion
2749 && data->stack_parm != 0
2750 && MEM_P (data->stack_parm)
2751 && data->locate.offset.var == 0
2752 && reg_mentioned_p (virtual_incoming_args_rtx,
2753 XEXP (data->stack_parm, 0)))
2755 rtx linsn = get_last_insn ();
2756 rtx sinsn, set;
2758 /* Mark complex types separately. */
2759 if (GET_CODE (parmreg) == CONCAT)
2761 enum machine_mode submode
2762 = GET_MODE_INNER (GET_MODE (parmreg));
2763 int regnor = REGNO (XEXP (parmreg, 0));
2764 int regnoi = REGNO (XEXP (parmreg, 1));
2765 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2766 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2767 GET_MODE_SIZE (submode));
2769 /* Scan backwards for the set of the real and
2770 imaginary parts. */
2771 for (sinsn = linsn; sinsn != 0;
2772 sinsn = prev_nonnote_insn (sinsn))
2774 set = single_set (sinsn);
2775 if (set == 0)
2776 continue;
2778 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2779 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2780 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2781 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2784 else if ((set = single_set (linsn)) != 0
2785 && SET_DEST (set) == parmreg)
2786 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2789 /* For pointer data type, suggest pointer register. */
2790 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2791 mark_reg_pointer (parmreg,
2792 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2795 /* A subroutine of assign_parms. Allocate stack space to hold the current
2796 parameter. Get it there. Perform all ABI specified conversions. */
2798 static void
2799 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2800 struct assign_parm_data_one *data)
2802 /* Value must be stored in the stack slot STACK_PARM during function
2803 execution. */
2804 bool to_conversion = false;
2806 if (data->promoted_mode != data->nominal_mode)
2808 /* Conversion is required. */
2809 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2811 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2813 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2814 to_conversion = true;
2816 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2817 TYPE_UNSIGNED (TREE_TYPE (parm)));
2819 if (data->stack_parm)
2820 /* ??? This may need a big-endian conversion on sparc64. */
2821 data->stack_parm
2822 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2825 if (data->entry_parm != data->stack_parm)
2827 rtx src, dest;
2829 if (data->stack_parm == 0)
2831 data->stack_parm
2832 = assign_stack_local (GET_MODE (data->entry_parm),
2833 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2834 TYPE_ALIGN (data->passed_type));
2835 set_mem_attributes (data->stack_parm, parm, 1);
2838 dest = validize_mem (data->stack_parm);
2839 src = validize_mem (data->entry_parm);
2841 if (MEM_P (src))
2843 /* Use a block move to handle potentially misaligned entry_parm. */
2844 if (!to_conversion)
2845 push_to_sequence2 (all->first_conversion_insn,
2846 all->last_conversion_insn);
2847 to_conversion = true;
2849 emit_block_move (dest, src,
2850 GEN_INT (int_size_in_bytes (data->passed_type)),
2851 BLOCK_OP_NORMAL);
2853 else
2854 emit_move_insn (dest, src);
2857 if (to_conversion)
2859 all->first_conversion_insn = get_insns ();
2860 all->last_conversion_insn = get_last_insn ();
2861 end_sequence ();
2864 SET_DECL_RTL (parm, data->stack_parm);
2867 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2868 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2870 static void
2871 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2873 tree parm;
2874 tree orig_fnargs = all->orig_fnargs;
2876 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2878 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2879 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2881 rtx tmp, real, imag;
2882 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2884 real = DECL_RTL (fnargs);
2885 imag = DECL_RTL (TREE_CHAIN (fnargs));
2886 if (inner != GET_MODE (real))
2888 real = gen_lowpart_SUBREG (inner, real);
2889 imag = gen_lowpart_SUBREG (inner, imag);
2892 if (TREE_ADDRESSABLE (parm))
2894 rtx rmem, imem;
2895 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2897 /* split_complex_arg put the real and imag parts in
2898 pseudos. Move them to memory. */
2899 tmp = assign_stack_local (DECL_MODE (parm), size,
2900 TYPE_ALIGN (TREE_TYPE (parm)));
2901 set_mem_attributes (tmp, parm, 1);
2902 rmem = adjust_address_nv (tmp, inner, 0);
2903 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2904 push_to_sequence2 (all->first_conversion_insn,
2905 all->last_conversion_insn);
2906 emit_move_insn (rmem, real);
2907 emit_move_insn (imem, imag);
2908 all->first_conversion_insn = get_insns ();
2909 all->last_conversion_insn = get_last_insn ();
2910 end_sequence ();
2912 else
2913 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2914 SET_DECL_RTL (parm, tmp);
2916 real = DECL_INCOMING_RTL (fnargs);
2917 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2918 if (inner != GET_MODE (real))
2920 real = gen_lowpart_SUBREG (inner, real);
2921 imag = gen_lowpart_SUBREG (inner, imag);
2923 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2924 set_decl_incoming_rtl (parm, tmp, false);
2925 fnargs = TREE_CHAIN (fnargs);
2927 else
2929 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2930 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2932 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2933 instead of the copy of decl, i.e. FNARGS. */
2934 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2935 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2938 fnargs = TREE_CHAIN (fnargs);
2942 /* Assign RTL expressions to the function's parameters. This may involve
2943 copying them into registers and using those registers as the DECL_RTL. */
2945 static void
2946 assign_parms (tree fndecl)
2948 struct assign_parm_data_all all;
2949 tree fnargs, parm;
2951 crtl->args.internal_arg_pointer
2952 = targetm.calls.internal_arg_pointer ();
2954 assign_parms_initialize_all (&all);
2955 fnargs = assign_parms_augmented_arg_list (&all);
2957 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2959 struct assign_parm_data_one data;
2961 /* Extract the type of PARM; adjust it according to ABI. */
2962 assign_parm_find_data_types (&all, parm, &data);
2964 /* Early out for errors and void parameters. */
2965 if (data.passed_mode == VOIDmode)
2967 SET_DECL_RTL (parm, const0_rtx);
2968 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2969 continue;
2972 if (cfun->stdarg && !TREE_CHAIN (parm))
2973 assign_parms_setup_varargs (&all, &data, false);
2975 /* Find out where the parameter arrives in this function. */
2976 assign_parm_find_entry_rtl (&all, &data);
2978 /* Find out where stack space for this parameter might be. */
2979 if (assign_parm_is_stack_parm (&all, &data))
2981 assign_parm_find_stack_rtl (parm, &data);
2982 assign_parm_adjust_entry_rtl (&data);
2985 /* Record permanently how this parm was passed. */
2986 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
2988 /* Update info on where next arg arrives in registers. */
2989 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
2990 data.passed_type, data.named_arg);
2992 assign_parm_adjust_stack_rtl (&data);
2994 if (assign_parm_setup_block_p (&data))
2995 assign_parm_setup_block (&all, parm, &data);
2996 else if (data.passed_pointer || use_register_for_decl (parm))
2997 assign_parm_setup_reg (&all, parm, &data);
2998 else
2999 assign_parm_setup_stack (&all, parm, &data);
3002 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3003 assign_parms_unsplit_complex (&all, fnargs);
3005 /* Output all parameter conversion instructions (possibly including calls)
3006 now that all parameters have been copied out of hard registers. */
3007 emit_insn (all.first_conversion_insn);
3009 /* If we are receiving a struct value address as the first argument, set up
3010 the RTL for the function result. As this might require code to convert
3011 the transmitted address to Pmode, we do this here to ensure that possible
3012 preliminary conversions of the address have been emitted already. */
3013 if (all.function_result_decl)
3015 tree result = DECL_RESULT (current_function_decl);
3016 rtx addr = DECL_RTL (all.function_result_decl);
3017 rtx x;
3019 if (DECL_BY_REFERENCE (result))
3020 x = addr;
3021 else
3023 addr = convert_memory_address (Pmode, addr);
3024 x = gen_rtx_MEM (DECL_MODE (result), addr);
3025 set_mem_attributes (x, result, 1);
3027 SET_DECL_RTL (result, x);
3030 /* We have aligned all the args, so add space for the pretend args. */
3031 crtl->args.pretend_args_size = all.pretend_args_size;
3032 all.stack_args_size.constant += all.extra_pretend_bytes;
3033 crtl->args.size = all.stack_args_size.constant;
3035 /* Adjust function incoming argument size for alignment and
3036 minimum length. */
3038 #ifdef REG_PARM_STACK_SPACE
3039 crtl->args.size = MAX (crtl->args.size,
3040 REG_PARM_STACK_SPACE (fndecl));
3041 #endif
3043 crtl->args.size = CEIL_ROUND (crtl->args.size,
3044 PARM_BOUNDARY / BITS_PER_UNIT);
3046 #ifdef ARGS_GROW_DOWNWARD
3047 crtl->args.arg_offset_rtx
3048 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3049 : expand_expr (size_diffop (all.stack_args_size.var,
3050 size_int (-all.stack_args_size.constant)),
3051 NULL_RTX, VOIDmode, 0));
3052 #else
3053 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3054 #endif
3056 /* See how many bytes, if any, of its args a function should try to pop
3057 on return. */
3059 crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3060 crtl->args.size);
3062 /* For a stdarg.h function, save info about the
3063 regs and stack space used by the named args. */
3065 crtl->args.info = all.args_so_far;
3067 /* Set the rtx used for the function return value. Put this in its
3068 own variable so any optimizers that need this information don't have
3069 to include tree.h. Do this here so it gets done when an inlined
3070 function gets output. */
3072 crtl->return_rtx
3073 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3074 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3076 /* If scalar return value was computed in a pseudo-reg, or was a named
3077 return value that got dumped to the stack, copy that to the hard
3078 return register. */
3079 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3081 tree decl_result = DECL_RESULT (fndecl);
3082 rtx decl_rtl = DECL_RTL (decl_result);
3084 if (REG_P (decl_rtl)
3085 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3086 : DECL_REGISTER (decl_result))
3088 rtx real_decl_rtl;
3090 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3091 fndecl, true);
3092 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3093 /* The delay slot scheduler assumes that crtl->return_rtx
3094 holds the hard register containing the return value, not a
3095 temporary pseudo. */
3096 crtl->return_rtx = real_decl_rtl;
3101 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3102 For all seen types, gimplify their sizes. */
3104 static tree
3105 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3107 tree t = *tp;
3109 *walk_subtrees = 0;
3110 if (TYPE_P (t))
3112 if (POINTER_TYPE_P (t))
3113 *walk_subtrees = 1;
3114 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3115 && !TYPE_SIZES_GIMPLIFIED (t))
3117 gimplify_type_sizes (t, (tree *) data);
3118 *walk_subtrees = 1;
3122 return NULL;
3125 /* Gimplify the parameter list for current_function_decl. This involves
3126 evaluating SAVE_EXPRs of variable sized parameters and generating code
3127 to implement callee-copies reference parameters. Returns a list of
3128 statements to add to the beginning of the function, or NULL if nothing
3129 to do. */
3131 tree
3132 gimplify_parameters (void)
3134 struct assign_parm_data_all all;
3135 tree fnargs, parm, stmts = NULL;
3137 assign_parms_initialize_all (&all);
3138 fnargs = assign_parms_augmented_arg_list (&all);
3140 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3142 struct assign_parm_data_one data;
3144 /* Extract the type of PARM; adjust it according to ABI. */
3145 assign_parm_find_data_types (&all, parm, &data);
3147 /* Early out for errors and void parameters. */
3148 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3149 continue;
3151 /* Update info on where next arg arrives in registers. */
3152 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3153 data.passed_type, data.named_arg);
3155 /* ??? Once upon a time variable_size stuffed parameter list
3156 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3157 turned out to be less than manageable in the gimple world.
3158 Now we have to hunt them down ourselves. */
3159 walk_tree_without_duplicates (&data.passed_type,
3160 gimplify_parm_type, &stmts);
3162 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3164 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3165 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3168 if (data.passed_pointer)
3170 tree type = TREE_TYPE (data.passed_type);
3171 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3172 type, data.named_arg))
3174 tree local, t;
3176 /* For constant sized objects, this is trivial; for
3177 variable-sized objects, we have to play games. */
3178 if (TREE_CONSTANT (DECL_SIZE (parm)))
3180 local = create_tmp_var (type, get_name (parm));
3181 DECL_IGNORED_P (local) = 0;
3183 else
3185 tree ptr_type, addr;
3187 ptr_type = build_pointer_type (type);
3188 addr = create_tmp_var (ptr_type, get_name (parm));
3189 DECL_IGNORED_P (addr) = 0;
3190 local = build_fold_indirect_ref (addr);
3192 t = built_in_decls[BUILT_IN_ALLOCA];
3193 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3194 t = fold_convert (ptr_type, t);
3195 t = build_gimple_modify_stmt (addr, t);
3196 gimplify_and_add (t, &stmts);
3199 t = build_gimple_modify_stmt (local, parm);
3200 gimplify_and_add (t, &stmts);
3202 SET_DECL_VALUE_EXPR (parm, local);
3203 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3208 return stmts;
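/* An illustrative sketch, not part of GCC, of the callee-copy
   transformation that gimplify_parameters arranges above: the incoming
   reference parameter is copied into a function-local temporary and the
   body works on the copy.  For a fixed-size type the temporary is an
   ordinary local; for a variable-sized type the code above allocates it
   with alloca, which this sketch mimics with a C99 VLA.  All names are
   hypothetical.  Kept under "#if 0"; builds as plain C when extracted.  */
#if 0
#include <string.h>

struct fixed { int a, b; };

static int
sketch_callee_copy_fixed (const struct fixed *parm)
{
  struct fixed local = *parm;      /* callee-side copy */
  local.a += local.b;              /* the body only touches the copy */
  return local.a;
}

static long
sketch_callee_copy_variable (const char *parm, unsigned long size)
{
  char local[size];                /* VLA standing in for alloca */
  long sum = 0;
  unsigned long i;

  memcpy (local, parm, size);      /* copy the referenced object */
  for (i = 0; i < size; i++)
    sum += local[i];
  return sum;
}
#endif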
3211 /* Compute the size and offset from the start of the stacked arguments for a
3212 parm passed in mode PASSED_MODE and with type TYPE.
3214 INITIAL_OFFSET_PTR points to the current offset into the stacked
3215 arguments.
3217 The starting offset and size for this parm are returned in
3218 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3219 nonzero, the offset is that of stack slot, which is returned in
3220 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3221 padding required from the initial offset ptr to the stack slot.
3223 IN_REGS is nonzero if the argument will be passed in registers. It will
3224 never be set if REG_PARM_STACK_SPACE is not defined.
3226 FNDECL is the function in which the argument was defined.
3228 There are two types of rounding that are done. The first, controlled by
3229 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3230 list to be aligned to the specific boundary (in bits). This rounding
3231 affects the initial and starting offsets, but not the argument size.
3233 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3234 optionally rounds the size of the parm to PARM_BOUNDARY. The
3235 initial offset is not affected by this rounding, while the size always
3236 is and the starting offset may be. */
3238 /* LOCATE->OFFSET will be negative for the ARGS_GROW_DOWNWARD case;
3239 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3240 callers pass in the total size of args so far as
3241 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3243 void
3244 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3245 int partial, tree fndecl ATTRIBUTE_UNUSED,
3246 struct args_size *initial_offset_ptr,
3247 struct locate_and_pad_arg_data *locate)
3249 tree sizetree;
3250 enum direction where_pad;
3251 unsigned int boundary;
3252 int reg_parm_stack_space = 0;
3253 int part_size_in_regs;
3255 #ifdef REG_PARM_STACK_SPACE
3256 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3258 /* If we have found a stack parm before we reach the end of the
3259 area reserved for registers, skip that area. */
3260 if (! in_regs)
3262 if (reg_parm_stack_space > 0)
3264 if (initial_offset_ptr->var)
3266 initial_offset_ptr->var
3267 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3268 ssize_int (reg_parm_stack_space));
3269 initial_offset_ptr->constant = 0;
3271 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3272 initial_offset_ptr->constant = reg_parm_stack_space;
3275 #endif /* REG_PARM_STACK_SPACE */
3277 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3279 sizetree
3280 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3281 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3282 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3283 locate->where_pad = where_pad;
3284 locate->boundary = boundary;
3286 /* Remember if the outgoing parameter requires extra alignment on the
3287 calling function side. */
3288 if (boundary > PREFERRED_STACK_BOUNDARY)
3289 boundary = PREFERRED_STACK_BOUNDARY;
3290 if (crtl->stack_alignment_needed < boundary)
3291 crtl->stack_alignment_needed = boundary;
3293 #ifdef ARGS_GROW_DOWNWARD
3294 locate->slot_offset.constant = -initial_offset_ptr->constant;
3295 if (initial_offset_ptr->var)
3296 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3297 initial_offset_ptr->var);
3300 tree s2 = sizetree;
3301 if (where_pad != none
3302 && (!host_integerp (sizetree, 1)
3303 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3304 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3305 SUB_PARM_SIZE (locate->slot_offset, s2);
3308 locate->slot_offset.constant += part_size_in_regs;
3310 if (!in_regs
3311 #ifdef REG_PARM_STACK_SPACE
3312 || REG_PARM_STACK_SPACE (fndecl) > 0
3313 #endif
3315 pad_to_arg_alignment (&locate->slot_offset, boundary,
3316 &locate->alignment_pad);
3318 locate->size.constant = (-initial_offset_ptr->constant
3319 - locate->slot_offset.constant);
3320 if (initial_offset_ptr->var)
3321 locate->size.var = size_binop (MINUS_EXPR,
3322 size_binop (MINUS_EXPR,
3323 ssize_int (0),
3324 initial_offset_ptr->var),
3325 locate->slot_offset.var);
3327 /* Pad_below needs the pre-rounded size to know how much to pad
3328 below. */
3329 locate->offset = locate->slot_offset;
3330 if (where_pad == downward)
3331 pad_below (&locate->offset, passed_mode, sizetree);
3333 #else /* !ARGS_GROW_DOWNWARD */
3334 if (!in_regs
3335 #ifdef REG_PARM_STACK_SPACE
3336 || REG_PARM_STACK_SPACE (fndecl) > 0
3337 #endif
3339 pad_to_arg_alignment (initial_offset_ptr, boundary,
3340 &locate->alignment_pad);
3341 locate->slot_offset = *initial_offset_ptr;
3343 #ifdef PUSH_ROUNDING
3344 if (passed_mode != BLKmode)
3345 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3346 #endif
3348 /* Pad_below needs the pre-rounded size to know how much to pad below
3349 so this must be done before rounding up. */
3350 locate->offset = locate->slot_offset;
3351 if (where_pad == downward)
3352 pad_below (&locate->offset, passed_mode, sizetree);
3354 if (where_pad != none
3355 && (!host_integerp (sizetree, 1)
3356 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3357 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3359 ADD_PARM_SIZE (locate->size, sizetree);
3361 locate->size.constant -= part_size_in_regs;
3362 #endif /* ARGS_GROW_DOWNWARD */
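/* An illustrative arithmetic sketch, not part of GCC, of the two kinds
   of rounding described above locate_and_pad_parm, for the
   args-grow-upward case with hypothetical numbers: the starting offset
   is rounded up to the argument's boundary, and the size is rounded up
   to PARM_BOUNDARY, so the next parameter starts on a parm boundary.
   Kept under "#if 0"; builds as plain C when extracted.  */
#if 0
#include <stdio.h>

#define SKETCH_CEIL_ROUND(VALUE, ALIGN) \
  (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

int
main (void)
{
  int offset = 6;              /* bytes of args laid out so far */
  int arg_boundary = 8;        /* hypothetical FUNCTION_ARG_BOUNDARY, in bytes */
  int parm_boundary = 4;       /* hypothetical PARM_BOUNDARY, in bytes */
  int size = 10;               /* raw size of this parm in bytes */

  int slot_offset = SKETCH_CEIL_ROUND (offset, arg_boundary);  /* 8  */
  int padded_size = SKETCH_CEIL_ROUND (size, parm_boundary);   /* 12 */

  printf ("slot at %d, next parm at %d\n",
          slot_offset, slot_offset + padded_size);   /* 8 and 20 */
  return 0;
}
#endif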
3365 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3366 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3368 static void
3369 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3370 struct args_size *alignment_pad)
3372 tree save_var = NULL_TREE;
3373 HOST_WIDE_INT save_constant = 0;
3374 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3375 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3377 #ifdef SPARC_STACK_BOUNDARY_HACK
3378 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3379 the real alignment of %sp. However, when it does this, the
3380 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3381 if (SPARC_STACK_BOUNDARY_HACK)
3382 sp_offset = 0;
3383 #endif
3385 if (boundary > PARM_BOUNDARY)
3387 save_var = offset_ptr->var;
3388 save_constant = offset_ptr->constant;
3391 alignment_pad->var = NULL_TREE;
3392 alignment_pad->constant = 0;
3394 if (boundary > BITS_PER_UNIT)
3396 if (offset_ptr->var)
3398 tree sp_offset_tree = ssize_int (sp_offset);
3399 tree offset = size_binop (PLUS_EXPR,
3400 ARGS_SIZE_TREE (*offset_ptr),
3401 sp_offset_tree);
3402 #ifdef ARGS_GROW_DOWNWARD
3403 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3404 #else
3405 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3406 #endif
3408 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3409 /* ARGS_SIZE_TREE includes constant term. */
3410 offset_ptr->constant = 0;
3411 if (boundary > PARM_BOUNDARY)
3412 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3413 save_var);
3415 else
3417 offset_ptr->constant = -sp_offset +
3418 #ifdef ARGS_GROW_DOWNWARD
3419 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3420 #else
3421 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3422 #endif
3423 if (boundary > PARM_BOUNDARY)
3424 alignment_pad->constant = offset_ptr->constant - save_constant;
3429 static void
3430 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3432 if (passed_mode != BLKmode)
3434 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3435 offset_ptr->constant
3436 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3437 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3438 - GET_MODE_SIZE (passed_mode));
3440 else
3442 if (TREE_CODE (sizetree) != INTEGER_CST
3443 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3445 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3446 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3447 /* Add it in. */
3448 ADD_PARM_SIZE (*offset_ptr, s2);
3449 SUB_PARM_SIZE (*offset_ptr, sizetree);
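/* An illustrative arithmetic sketch, not part of GCC, of pad_below
   above, with hypothetical numbers: a 2-byte (16-bit) value padded
   downward in a slot aligned to a 32-bit PARM_BOUNDARY has its offset
   bumped by 4 - 2 = 2 bytes, so the value sits at the top of its
   4-byte slot.  Kept under "#if 0"; builds as plain C when extracted.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int mode_bits = 16, parm_boundary_bits = 32, bits_per_unit = 8;
  int pad = 0;

  if (mode_bits % parm_boundary_bits)
    pad = ((mode_bits + parm_boundary_bits - 1)
           / parm_boundary_bits * parm_boundary_bits / bits_per_unit)
          - mode_bits / bits_per_unit;

  printf ("pad below = %d bytes\n", pad);   /* prints 2 */
  return 0;
}
#endif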
3455 /* True if register REGNO was alive at a place where `setjmp' was
3456 called and was set more than once or is an argument. Such regs may
3457 be clobbered by `longjmp'. */
3459 static bool
3460 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3462 /* There appear to be cases where some local vars never reach the
3463 backend but have bogus regnos. */
3464 if (regno >= max_reg_num ())
3465 return false;
3467 return ((REG_N_SETS (regno) > 1
3468 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3469 && REGNO_REG_SET_P (setjmp_crosses, regno));
3472 /* Walk the tree of blocks describing the binding levels within a
3473 function and warn about variables that might be killed by setjmp or
3474 vfork. This is done after flow analysis but before register
3475 allocation, since register allocation will replace the pseudo-regs
3476 with hard regs. */
3478 static void
3479 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3481 tree decl, sub;
3483 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3485 if (TREE_CODE (decl) == VAR_DECL
3486 && DECL_RTL_SET_P (decl)
3487 && REG_P (DECL_RTL (decl))
3488 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3489 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3490 " %<longjmp%> or %<vfork%>", decl);
3493 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3494 setjmp_vars_warning (setjmp_crosses, sub);
3497 /* Do the appropriate part of setjmp_vars_warning
3498 but for arguments instead of local variables. */
3500 static void
3501 setjmp_args_warning (bitmap setjmp_crosses)
3503 tree decl;
3504 for (decl = DECL_ARGUMENTS (current_function_decl);
3505 decl; decl = TREE_CHAIN (decl))
3506 if (DECL_RTL (decl) != 0
3507 && REG_P (DECL_RTL (decl))
3508 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3509 warning (OPT_Wclobbered,
3510 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3511 decl);
3514 /* Generate warning messages for variables live across setjmp. */
3516 void
3517 generate_setjmp_warnings (void)
3519 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3521 if (n_basic_blocks == NUM_FIXED_BLOCKS
3522 || bitmap_empty_p (setjmp_crosses))
3523 return;
3525 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3526 setjmp_args_warning (setjmp_crosses);
3530 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3531 and create duplicate blocks. */
3532 /* ??? Need an option to either create block fragments or to create
3533 abstract origin duplicates of a source block. It really depends
3534 on what optimization has been performed. */
3536 void
3537 reorder_blocks (void)
3539 tree block = DECL_INITIAL (current_function_decl);
3540 VEC(tree,heap) *block_stack;
3542 if (block == NULL_TREE)
3543 return;
3545 block_stack = VEC_alloc (tree, heap, 10);
3547 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3548 clear_block_marks (block);
3550 /* Prune the old trees away, so that they don't get in the way. */
3551 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3552 BLOCK_CHAIN (block) = NULL_TREE;
3554 /* Recreate the block tree from the note nesting. */
3555 reorder_blocks_1 (get_insns (), block, &block_stack);
3556 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3558 VEC_free (tree, heap, block_stack);
3561 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3563 void
3564 clear_block_marks (tree block)
3566 while (block)
3568 TREE_ASM_WRITTEN (block) = 0;
3569 clear_block_marks (BLOCK_SUBBLOCKS (block));
3570 block = BLOCK_CHAIN (block);
3574 static void
3575 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3577 rtx insn;
3579 for (insn = insns; insn; insn = NEXT_INSN (insn))
3581 if (NOTE_P (insn))
3583 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3585 tree block = NOTE_BLOCK (insn);
3586 tree origin;
3588 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3589 ? BLOCK_FRAGMENT_ORIGIN (block)
3590 : block);
3592 /* If we have seen this block before, that means it now
3593 spans multiple address regions. Create a new fragment. */
3594 if (TREE_ASM_WRITTEN (block))
3596 tree new_block = copy_node (block);
3598 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3599 BLOCK_FRAGMENT_CHAIN (new_block)
3600 = BLOCK_FRAGMENT_CHAIN (origin);
3601 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3603 NOTE_BLOCK (insn) = new_block;
3604 block = new_block;
3607 BLOCK_SUBBLOCKS (block) = 0;
3608 TREE_ASM_WRITTEN (block) = 1;
3609 /* When there's only one block for the entire function,
3610 current_block == block and we mustn't do this; it
3611 would cause infinite recursion. */
3612 if (block != current_block)
3614 if (block != origin)
3615 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3617 BLOCK_SUPERCONTEXT (block) = current_block;
3618 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3619 BLOCK_SUBBLOCKS (current_block) = block;
3620 current_block = origin;
3622 VEC_safe_push (tree, heap, *p_block_stack, block);
3624 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3626 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3627 BLOCK_SUBBLOCKS (current_block)
3628 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3629 current_block = BLOCK_SUPERCONTEXT (current_block);
3635 /* Reverse the order of elements in the chain T of blocks,
3636 and return the new head of the chain (old last element). */
3638 tree
3639 blocks_nreverse (tree t)
3641 tree prev = 0, decl, next;
3642 for (decl = t; decl; decl = next)
3644 next = BLOCK_CHAIN (decl);
3645 BLOCK_CHAIN (decl) = prev;
3646 prev = decl;
3648 return prev;
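/* An illustrative sketch, not part of GCC, of the in-place chain
   reversal used by blocks_nreverse above, on a hypothetical singly
   linked list of ints rather than BLOCK nodes.  Kept under "#if 0";
   builds as plain C when extracted.  */
#if 0
struct snode { int value; struct snode *chain; };

static struct snode *
sketch_nreverse (struct snode *t)
{
  struct snode *prev = 0, *decl, *next;
  for (decl = t; decl; decl = next)
    {
      next = decl->chain;       /* remember the rest of the chain */
      decl->chain = prev;       /* point this node at the reversed part */
      prev = decl;              /* it becomes the new head */
    }
  return prev;
}
#endif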
3651 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3652 non-NULL, list them all into VECTOR, in a depth-first preorder
3653 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3654 blocks. */
3656 static int
3657 all_blocks (tree block, tree *vector)
3659 int n_blocks = 0;
3661 while (block)
3663 TREE_ASM_WRITTEN (block) = 0;
3665 /* Record this block. */
3666 if (vector)
3667 vector[n_blocks] = block;
3669 ++n_blocks;
3671 /* Record the subblocks, and their subblocks... */
3672 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3673 vector ? vector + n_blocks : 0);
3674 block = BLOCK_CHAIN (block);
3677 return n_blocks;
3680 /* Return a vector containing all the blocks rooted at BLOCK. The
3681 number of elements in the vector is stored in N_BLOCKS_P. The
3682 vector is dynamically allocated; it is the caller's responsibility
3683 to call `free' on the pointer returned. */
3685 static tree *
3686 get_block_vector (tree block, int *n_blocks_p)
3688 tree *block_vector;
3690 *n_blocks_p = all_blocks (block, NULL);
3691 block_vector = XNEWVEC (tree, *n_blocks_p);
3692 all_blocks (block, block_vector);
3694 return block_vector;
3697 static GTY(()) int next_block_index = 2;
3699 /* Set BLOCK_NUMBER for all the blocks in FN. */
3701 void
3702 number_blocks (tree fn)
3704 int i;
3705 int n_blocks;
3706 tree *block_vector;
3708 /* For SDB and XCOFF debugging output, we start numbering the blocks
3709 from 1 within each function, rather than keeping a running
3710 count. */
3711 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3712 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3713 next_block_index = 1;
3714 #endif
3716 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3718 /* The top-level BLOCK isn't numbered at all. */
3719 for (i = 1; i < n_blocks; ++i)
3720 /* We number the blocks from two. */
3721 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3723 free (block_vector);
3725 return;
3728 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3730 tree
3731 debug_find_var_in_block_tree (tree var, tree block)
3733 tree t;
3735 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3736 if (t == var)
3737 return block;
3739 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3741 tree ret = debug_find_var_in_block_tree (var, t);
3742 if (ret)
3743 return ret;
3746 return NULL_TREE;
3749 /* Keep track of whether we're in a dummy function context. If we are,
3750 we don't want to invoke the set_current_function hook, because we'll
3751 get into trouble if the hook calls target_reinit () recursively or
3752 when the initial initialization is not yet complete. */
3754 static bool in_dummy_function;
3756 /* Invoke the target hook when setting cfun. */
3758 static void
3759 invoke_set_current_function_hook (tree fndecl)
3761 if (!in_dummy_function)
3762 targetm.set_current_function (fndecl);
3765 /* cfun should never be set directly; use this function. */
3767 void
3768 set_cfun (struct function *new_cfun)
3770 if (cfun != new_cfun)
3772 cfun = new_cfun;
3773 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3777 /* Keep track of the cfun stack. */
3779 typedef struct function *function_p;
3781 DEF_VEC_P(function_p);
3782 DEF_VEC_ALLOC_P(function_p,heap);
3784 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3786 static VEC(function_p,heap) *cfun_stack;
3788 /* We save the value of in_system_header here when pushing the first
3789 function on the cfun stack, and we restore it from here when
3790 popping the last function. */
3792 static bool saved_in_system_header;
3794 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3796 void
3797 push_cfun (struct function *new_cfun)
3799 if (cfun == NULL)
3800 saved_in_system_header = in_system_header;
3801 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3802 if (new_cfun)
3803 in_system_header = DECL_IN_SYSTEM_HEADER (new_cfun->decl);
3804 set_cfun (new_cfun);
3807 /* Pop cfun from the stack. */
3809 void
3810 pop_cfun (void)
3812 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3813 in_system_header = ((new_cfun == NULL) ? saved_in_system_header
3814 : DECL_IN_SYSTEM_HEADER (new_cfun->decl));
3815 set_cfun (new_cfun);
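/* Editor's note: a minimal usage sketch (not part of this file); the
   caller "examine_function" is hypothetical.  The push/pop pair brackets
   work done on another function's state and restores the previous cfun,
   including in_system_header, on the way out.  */
#if 0
static void
examine_function (tree fndecl)
{
  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  /* ... read or modify cfun here ... */
  pop_cfun ();
}
#endif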
3818 /* Return the current value of funcdef_no and increment it. */
3819 int
3820 get_next_funcdef_no (void)
3822 return funcdef_no++;
3825 /* Allocate a function structure for FNDECL and set its contents
3826 to the defaults. Set cfun to the newly-allocated object.
3827 Some of the helper functions invoked during initialization assume
3828 that cfun has already been set. Therefore, assign the new object
3829 directly into cfun and invoke the back end hook explicitly at the
3830 very end, rather than initializing a temporary and calling set_cfun
3831 on it.
3833 ABSTRACT_P is true if this is a function that will never be seen by
3834 the middle-end. Such functions are front-end concepts (like C++
3835 function templates) that do not correspond directly to functions
3836 placed in object files. */
3838 void
3839 allocate_struct_function (tree fndecl, bool abstract_p)
3841 tree result;
3842 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3844 cfun = ggc_alloc_cleared (sizeof (struct function));
3846 current_function_funcdef_no = get_next_funcdef_no ();
3848 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3850 init_eh_for_function ();
3852 if (init_machine_status)
3853 cfun->machine = (*init_machine_status) ();
3855 if (fndecl != NULL_TREE)
3857 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3858 cfun->decl = fndecl;
3860 result = DECL_RESULT (fndecl);
3861 if (!abstract_p && aggregate_value_p (result, fndecl))
3863 #ifdef PCC_STATIC_STRUCT_RETURN
3864 cfun->returns_pcc_struct = 1;
3865 #endif
3866 cfun->returns_struct = 1;
3869 cfun->stdarg
3870 = (fntype
3871 && TYPE_ARG_TYPES (fntype) != 0
3872 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3873 != void_type_node));
3875 /* Assume all registers in stdarg functions need to be saved. */
3876 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3877 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3880 invoke_set_current_function_hook (fndecl);
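/* Editor's note: an illustration of the cfun->stdarg test above, using
   hypothetical declarations.  For a prototyped, non-variadic function the
   TYPE_ARG_TYPES list ends with void_type_node, so the test is false; for
   a variadic prototype the list does not end with void_type_node, so the
   test is true.  */
#if 0
extern int fixed_args (int a);          /* cfun->stdarg == 0 */
extern int variable_args (int a, ...);  /* cfun->stdarg == 1 */
#endif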
3883 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
3884 instead of just setting it. */
3886 void
3887 push_struct_function (tree fndecl)
3889 if (cfun == NULL)
3890 saved_in_system_header = in_system_header;
3891 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3892 if (fndecl)
3893 in_system_header = DECL_IN_SYSTEM_HEADER (fndecl);
3894 allocate_struct_function (fndecl, false);
3897 /* Reset cfun and other non-struct-function variables to defaults as
3898 appropriate for emitting rtl at the start of a function. */
3900 static void
3901 prepare_function_start (void)
3903 gcc_assert (!crtl->emit.x_last_insn);
3904 init_emit ();
3905 init_varasm_status ();
3906 init_expr ();
3908 cse_not_expected = ! optimize;
3910 /* Caller save not needed yet. */
3911 caller_save_needed = 0;
3913 /* We haven't done register allocation yet. */
3914 reg_renumber = 0;
3916 /* Indicate that we have not instantiated virtual registers yet. */
3917 virtuals_instantiated = 0;
3919 /* Indicate that we want CONCATs now. */
3920 generating_concat_p = 1;
3922 /* Indicate we have no need of a frame pointer yet. */
3923 frame_pointer_needed = 0;
3926 /* Initialize the rtl expansion mechanism so that we can do simple things
3927 like generate sequences. This is used to provide a context during global
3928 initialization of some passes. You must call expand_dummy_function_end
3929 to exit this context. */
3931 void
3932 init_dummy_function_start (void)
3934 gcc_assert (!in_dummy_function);
3935 in_dummy_function = true;
3936 push_struct_function (NULL_TREE);
3937 prepare_function_start ();
3940 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3941 and initialize static variables for generating RTL for the statements
3942 of the function. */
3944 void
3945 init_function_start (tree subr)
3947 if (subr && DECL_STRUCT_FUNCTION (subr))
3948 set_cfun (DECL_STRUCT_FUNCTION (subr));
3949 else
3950 allocate_struct_function (subr, false);
3951 prepare_function_start ();
3953 /* Warn if this value is an aggregate type,
3954 regardless of which calling convention we are using for it. */
3955 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3956 warning (OPT_Waggregate_return, "function returns an aggregate");
3959 /* Make sure all values used by the optimization passes have sane
3960 defaults. */
3961 unsigned int
3962 init_function_for_compilation (void)
3964 reg_renumber = 0;
3966 /* No prologue/epilogue insns yet. Make sure that these vectors are
3967 empty. */
3968 gcc_assert (VEC_length (int, prologue) == 0);
3969 gcc_assert (VEC_length (int, epilogue) == 0);
3970 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3971 return 0;
3974 struct rtl_opt_pass pass_init_function =
3977 RTL_PASS,
3978 NULL, /* name */
3979 NULL, /* gate */
3980 init_function_for_compilation, /* execute */
3981 NULL, /* sub */
3982 NULL, /* next */
3983 0, /* static_pass_number */
3984 0, /* tv_id */
3985 0, /* properties_required */
3986 0, /* properties_provided */
3987 0, /* properties_destroyed */
3988 0, /* todo_flags_start */
3989 0 /* todo_flags_finish */
3994 void
3995 expand_main_function (void)
3997 #if (defined(INVOKE__main) \
3998 || (!defined(HAS_INIT_SECTION) \
3999 && !defined(INIT_SECTION_ASM_OP) \
4000 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4001 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4002 #endif
4005 /* Expand code to initialize the stack_protect_guard. This is invoked at
4006 the beginning of a function to be protected. */
4008 #ifndef HAVE_stack_protect_set
4009 # define HAVE_stack_protect_set 0
4010 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4011 #endif
4013 void
4014 stack_protect_prologue (void)
4016 tree guard_decl = targetm.stack_protect_guard ();
4017 rtx x, y;
4019 /* Avoid expand_expr here, because we don't want guard_decl pulled
4020 into registers unless absolutely necessary. And we know that
4021 crtl->stack_protect_guard is a local stack slot, so this skips
4022 all the fluff. */
4023 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4024 y = validize_mem (DECL_RTL (guard_decl));
4026 /* Allow the target to copy from Y to X without leaking Y into a
4027 register. */
4028 if (HAVE_stack_protect_set)
4030 rtx insn = gen_stack_protect_set (x, y);
4031 if (insn)
4033 emit_insn (insn);
4034 return;
4038 /* Otherwise do a straight move. */
4039 emit_move_insn (x, y);
4042 /* Expand code to verify the stack_protect_guard. This is invoked at
4043 the end of a function to be protected. */
4045 #ifndef HAVE_stack_protect_test
4046 # define HAVE_stack_protect_test 0
4047 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4048 #endif
4050 void
4051 stack_protect_epilogue (void)
4053 tree guard_decl = targetm.stack_protect_guard ();
4054 rtx label = gen_label_rtx ();
4055 rtx x, y, tmp;
4057 /* Avoid expand_expr here, because we don't want guard_decl pulled
4058 into registers unless absolutely necessary. And we know that
4059 crtl->stack_protect_guard is a local stack slot, so this skips
4060 all the fluff. */
4061 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4062 y = validize_mem (DECL_RTL (guard_decl));
4064 /* Allow the target to compare Y with X without leaking either into
4065 a register. */
4066 switch (HAVE_stack_protect_test != 0)
4068 case 1:
4069 tmp = gen_stack_protect_test (x, y, label);
4070 if (tmp)
4072 emit_insn (tmp);
4073 break;
4075 /* FALLTHRU */
4077 default:
4078 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4079 break;
4082 /* The noreturn predictor has been moved to the tree level. The rtl-level
4083 predictors estimate this branch at about 20%, which isn't enough to get
4084 things moved out of line. Since this is the only extant case of adding
4085 a noreturn function at the rtl level, it doesn't seem worth doing anything
4086 except adding the prediction by hand. */
4087 tmp = get_last_insn ();
4088 if (JUMP_P (tmp))
4089 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4091 expand_expr_stmt (targetm.stack_protect_fail ());
4092 emit_label (label);
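/* Editor's note: a rough source-level sketch of what the two routines
   above emit.  The guard symbol and failure routine shown are the names
   used by the common libssp runtime and are illustrative only; the actual
   RTL reads whatever targetm.stack_protect_guard returns and calls
   targetm.stack_protect_fail.  */
#if 0
extern long __stack_chk_guard;
extern void __stack_chk_fail (void);

void
protected_function (void)
{
  long guard_local = __stack_chk_guard;    /* stack_protect_prologue */
  /* ... body of the protected function ... */
  if (guard_local != __stack_chk_guard)    /* stack_protect_epilogue */
    __stack_chk_fail ();
}
#endif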
4095 /* Start the RTL for a new function, and set variables used for
4096 emitting RTL.
4097 SUBR is the FUNCTION_DECL node. */
4101 void
4102 expand_function_start (tree subr)
4104 /* Make sure volatile mem refs aren't considered
4105 valid operands of arithmetic insns. */
4106 init_recog_no_volatile ();
4108 crtl->profile
4109 = (profile_flag
4110 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4112 crtl->limit_stack
4113 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4115 /* Make the label for return statements to jump to. Do not special
4116 case machines with special return instructions -- they will be
4117 handled later during jump, ifcvt, or epilogue creation. */
4118 return_label = gen_label_rtx ();
4120 /* Initialize rtx used to return the value. */
4121 /* Do this before assign_parms so that we copy the struct value address
4122 before any library calls that assign parms might generate. */
4124 /* Decide whether to return the value in memory or in a register. */
4125 if (aggregate_value_p (DECL_RESULT (subr), subr))
4127 /* Returning something that won't go in a register. */
4128 rtx value_address = 0;
4130 #ifdef PCC_STATIC_STRUCT_RETURN
4131 if (cfun->returns_pcc_struct)
4133 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4134 value_address = assemble_static_space (size);
4136 else
4137 #endif
4139 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4140 /* Expect to be passed the address of a place to store the value.
4141 If it is passed as an argument, assign_parms will take care of
4142 it. */
4143 if (sv)
4145 value_address = gen_reg_rtx (Pmode);
4146 emit_move_insn (value_address, sv);
4149 if (value_address)
4151 rtx x = value_address;
4152 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4154 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4155 set_mem_attributes (x, DECL_RESULT (subr), 1);
4157 SET_DECL_RTL (DECL_RESULT (subr), x);
4160 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4161 /* If return mode is void, this decl rtl should not be used. */
4162 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4163 else
4165 /* Compute the return values into a pseudo reg, which we will copy
4166 into the true return register after the cleanups are done. */
4167 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4168 if (TYPE_MODE (return_type) != BLKmode
4169 && targetm.calls.return_in_msb (return_type))
4170 /* expand_function_end will insert the appropriate padding in
4171 this case. Use the return value's natural (unpadded) mode
4172 within the function proper. */
4173 SET_DECL_RTL (DECL_RESULT (subr),
4174 gen_reg_rtx (TYPE_MODE (return_type)));
4175 else
4177 /* In order to figure out what mode to use for the pseudo, we
4178 figure out what the mode of the eventual return register will
4179 actually be, and use that. */
4180 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4182 /* Structures that are returned in registers are not
4183 aggregate_value_p, so we may see a PARALLEL or a REG. */
4184 if (REG_P (hard_reg))
4185 SET_DECL_RTL (DECL_RESULT (subr),
4186 gen_reg_rtx (GET_MODE (hard_reg)));
4187 else
4189 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4190 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4194 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4195 result to the real return register(s). */
4196 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4199 /* Initialize rtx for parameters and local variables.
4200 In some cases this requires emitting insns. */
4201 assign_parms (subr);
4203 /* If function gets a static chain arg, store it. */
4204 if (cfun->static_chain_decl)
4206 tree parm = cfun->static_chain_decl;
4207 rtx local = gen_reg_rtx (Pmode);
4209 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4210 SET_DECL_RTL (parm, local);
4211 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4213 emit_move_insn (local, static_chain_incoming_rtx);
4216 /* If the function receives a non-local goto, then store the
4217 bits we need to restore the frame pointer. */
4218 if (cfun->nonlocal_goto_save_area)
4220 tree t_save;
4221 rtx r_save;
4223 /* ??? We need to do this save early. Unfortunately, this point is
4224 before the frame variable gets declared. Help out... */
4225 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4226 if (!DECL_RTL_SET_P (var))
4227 expand_decl (var);
4229 t_save = build4 (ARRAY_REF, ptr_type_node,
4230 cfun->nonlocal_goto_save_area,
4231 integer_zero_node, NULL_TREE, NULL_TREE);
4232 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4233 r_save = convert_memory_address (Pmode, r_save);
4235 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4236 update_nonlocal_goto_save_area ();
4239 /* The following was moved from init_function_start.
4240 The move is supposed to make sdb output more accurate. */
4241 /* Indicate the beginning of the function body,
4242 as opposed to parm setup. */
4243 emit_note (NOTE_INSN_FUNCTION_BEG);
4245 gcc_assert (NOTE_P (get_last_insn ()));
4247 parm_birth_insn = get_last_insn ();
4249 if (crtl->profile)
4251 #ifdef PROFILE_HOOK
4252 PROFILE_HOOK (current_function_funcdef_no);
4253 #endif
4256 /* The stack checking probe should go right after the display
4257 initializations. */
4258 if (flag_stack_check)
4259 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4261 /* Make sure there is a line number after the function entry setup code. */
4262 force_next_line_note ();
4265 /* Undo the effects of init_dummy_function_start. */
4266 void
4267 expand_dummy_function_end (void)
4269 gcc_assert (in_dummy_function);
4271 /* End any sequences that failed to be closed due to syntax errors. */
4272 while (in_sequence_p ())
4273 end_sequence ();
4275 /* Outside function body, can't compute type's actual size
4276 until next function's body starts. */
4278 free_after_parsing (cfun);
4279 free_after_compilation (cfun);
4280 pop_cfun ();
4281 in_dummy_function = false;
4284 /* Call DOIT for each hard register used as a return value from
4285 the current function. */
4287 void
4288 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4290 rtx outgoing = crtl->return_rtx;
4292 if (! outgoing)
4293 return;
4295 if (REG_P (outgoing))
4296 (*doit) (outgoing, arg);
4297 else if (GET_CODE (outgoing) == PARALLEL)
4299 int i;
4301 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4303 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4305 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4306 (*doit) (x, arg);
4311 static void
4312 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4314 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4317 void
4318 clobber_return_register (void)
4320 diddle_return_value (do_clobber_return_reg, NULL);
4322 /* In case we use a pseudo to return the value, clobber it too. */
4323 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4325 tree decl_result = DECL_RESULT (current_function_decl);
4326 rtx decl_rtl = DECL_RTL (decl_result);
4327 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4329 do_clobber_return_reg (decl_rtl, NULL);
4334 static void
4335 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4337 emit_insn (gen_rtx_USE (VOIDmode, reg));
4340 static void
4341 use_return_register (void)
4343 diddle_return_value (do_use_return_reg, NULL);
4346 /* Possibly warn about unused parameters. */
4347 void
4348 do_warn_unused_parameter (tree fn)
4350 tree decl;
4352 for (decl = DECL_ARGUMENTS (fn);
4353 decl; decl = TREE_CHAIN (decl))
4354 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4355 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4356 && !TREE_NO_WARNING (decl))
4357 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4360 static GTY(()) rtx initial_trampoline;
4362 /* Generate RTL for the end of the current function. */
4364 void
4365 expand_function_end (void)
4367 rtx clobber_after;
4369 /* If arg_pointer_save_area was referenced only from a nested
4370 function, we will not have initialized it yet. Do that now. */
4371 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4372 get_arg_pointer_save_area ();
4374 /* If we are doing stack checking and this function makes calls,
4375 do a stack probe at the start of the function to ensure we have enough
4376 space for another stack frame. */
4377 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4379 rtx insn, seq;
4381 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4382 if (CALL_P (insn))
4384 start_sequence ();
4385 probe_stack_range (STACK_CHECK_PROTECT,
4386 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4387 seq = get_insns ();
4388 end_sequence ();
4389 emit_insn_before (seq, stack_check_probe_note);
4390 break;
4394 /* End any sequences that failed to be closed due to syntax errors. */
4395 while (in_sequence_p ())
4396 end_sequence ();
4398 clear_pending_stack_adjust ();
4399 do_pending_stack_adjust ();
4401 /* Output a linenumber for the end of the function.
4402 SDB depends on this. */
4403 force_next_line_note ();
4404 set_curr_insn_source_location (input_location);
4406 /* Before the return label (if any), clobber the return
4407 registers so that they are not propagated live to the rest of
4408 the function. This can only happen with functions that drop
4409 through; if there had been a return statement, there would
4410 have either been a return rtx, or a jump to the return label.
4412 We delay actual code generation until after the current_function_value_rtx
4413 is computed. */
4414 clobber_after = get_last_insn ();
4416 /* Output the label for the actual return from the function. */
4417 emit_label (return_label);
4419 if (USING_SJLJ_EXCEPTIONS)
4421 /* Let except.c know where it should emit the call to unregister
4422 the function context for sjlj exceptions. */
4423 if (flag_exceptions)
4424 sjlj_emit_function_exit_after (get_last_insn ());
4426 else
4428 /* We want to ensure that instructions that may trap are not
4429 moved into the epilogue by scheduling, because we don't
4430 always emit unwind information for the epilogue. */
4431 if (flag_non_call_exceptions)
4432 emit_insn (gen_blockage ());
4435 /* If this is an implementation of throw, do what's necessary to
4436 communicate between __builtin_eh_return and the epilogue. */
4437 expand_eh_return ();
4439 /* If scalar return value was computed in a pseudo-reg, or was a named
4440 return value that got dumped to the stack, copy that to the hard
4441 return register. */
4442 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4444 tree decl_result = DECL_RESULT (current_function_decl);
4445 rtx decl_rtl = DECL_RTL (decl_result);
4447 if (REG_P (decl_rtl)
4448 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4449 : DECL_REGISTER (decl_result))
4451 rtx real_decl_rtl = crtl->return_rtx;
4453 /* This should be set in assign_parms. */
4454 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4456 /* If this is a BLKmode structure being returned in registers,
4457 then use the mode computed in expand_return. Note that if
4458 decl_rtl is memory, then its mode may have been changed,
4459 but that crtl->return_rtx has not. */
4460 if (GET_MODE (real_decl_rtl) == BLKmode)
4461 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4463 /* If a non-BLKmode return value should be padded at the least
4464 significant end of the register, shift it left by the appropriate
4465 amount. BLKmode results are handled using the group load/store
4466 machinery. */
4467 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4468 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4470 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4471 REGNO (real_decl_rtl)),
4472 decl_rtl);
4473 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4475 /* If a named return value dumped decl_return to memory, then
4476 we may need to re-do the PROMOTE_MODE signed/unsigned
4477 extension. */
4478 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4480 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4482 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4483 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4484 &unsignedp, 1);
4486 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4488 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4490 /* If expand_function_start has created a PARALLEL for decl_rtl,
4491 move the result to the real return registers. Otherwise, do
4492 a group load from decl_rtl for a named return. */
4493 if (GET_CODE (decl_rtl) == PARALLEL)
4494 emit_group_move (real_decl_rtl, decl_rtl);
4495 else
4496 emit_group_load (real_decl_rtl, decl_rtl,
4497 TREE_TYPE (decl_result),
4498 int_size_in_bytes (TREE_TYPE (decl_result)));
4500 /* In the case of complex integer modes smaller than a word, we'll
4501 need to generate some non-trivial bitfield insertions. Do that
4502 on a pseudo and not the hard register. */
4503 else if (GET_CODE (decl_rtl) == CONCAT
4504 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4505 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4507 int old_generating_concat_p;
4508 rtx tmp;
4510 old_generating_concat_p = generating_concat_p;
4511 generating_concat_p = 0;
4512 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4513 generating_concat_p = old_generating_concat_p;
4515 emit_move_insn (tmp, decl_rtl);
4516 emit_move_insn (real_decl_rtl, tmp);
4518 else
4519 emit_move_insn (real_decl_rtl, decl_rtl);
4523 /* If returning a structure, arrange to return the address of the value
4524 in a place where debuggers expect to find it.
4526 If returning a structure PCC style,
4527 the caller also depends on this value.
4528 And cfun->returns_pcc_struct is not necessarily set. */
4529 if (cfun->returns_struct
4530 || cfun->returns_pcc_struct)
4532 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4533 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4534 rtx outgoing;
4536 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4537 type = TREE_TYPE (type);
4538 else
4539 value_address = XEXP (value_address, 0);
4541 outgoing = targetm.calls.function_value (build_pointer_type (type),
4542 current_function_decl, true);
4544 /* Mark this as a function return value so integrate will delete the
4545 assignment and USE below when inlining this function. */
4546 REG_FUNCTION_VALUE_P (outgoing) = 1;
4548 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4549 value_address = convert_memory_address (GET_MODE (outgoing),
4550 value_address);
4552 emit_move_insn (outgoing, value_address);
4554 /* Show return register used to hold result (in this case the address
4555 of the result). */
4556 crtl->return_rtx = outgoing;
4559 /* Emit the actual code to clobber return register. */
4561 rtx seq;
4563 start_sequence ();
4564 clobber_return_register ();
4565 expand_naked_return ();
4566 seq = get_insns ();
4567 end_sequence ();
4569 emit_insn_after (seq, clobber_after);
4572 /* Output the label for the naked return from the function. */
4573 emit_label (naked_return_label);
4575 /* @@@ This is a kludge. We want to ensure that instructions that
4576 may trap are not moved into the epilogue by scheduling, because
4577 we don't always emit unwind information for the epilogue. */
4578 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4579 emit_insn (gen_blockage ());
4581 /* If stack protection is enabled for this function, check the guard. */
4582 if (crtl->stack_protect_guard)
4583 stack_protect_epilogue ();
4585 /* If we had calls to alloca, and this machine needs
4586 an accurate stack pointer to exit the function,
4587 insert some code to save and restore the stack pointer. */
4588 if (! EXIT_IGNORE_STACK
4589 && cfun->calls_alloca)
4591 rtx tem = 0;
4593 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4594 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4597 /* ??? This should no longer be necessary now that the old "stupid"
4598 register allocator is gone, but some parts of the compiler
4599 (e.g. reload_combine and sh mach_dep_reorg) still try to compute
4600 their own lifetime info instead of using the general framework. */
4601 use_return_register ();
4604 rtx
4605 get_arg_pointer_save_area (void)
4607 rtx ret = arg_pointer_save_area;
4609 if (! ret)
4611 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4612 arg_pointer_save_area = ret;
4615 if (! crtl->arg_pointer_save_area_init)
4617 rtx seq;
4619 /* Save the arg pointer at the beginning of the function. The
4620 generated stack slot may not be a valid memory address, so we
4621 have to check it and fix it if necessary. */
4622 start_sequence ();
4623 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4624 seq = get_insns ();
4625 end_sequence ();
4627 push_topmost_sequence ();
4628 emit_insn_after (seq, entry_of_function ());
4629 pop_topmost_sequence ();
4632 return ret;
4635 /* Extend a vector that records the INSN_UIDs of INSNS
4636 (a list of one or more insns). */
4638 static void
4639 record_insns (rtx insns, VEC(int,heap) **vecp)
4641 rtx tmp;
4643 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4644 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4647 /* Set the locator of the insn chain starting at INSN to LOC. */
4648 static void
4649 set_insn_locators (rtx insn, int loc)
4651 while (insn != NULL_RTX)
4653 if (INSN_P (insn))
4654 INSN_LOCATOR (insn) = loc;
4655 insn = NEXT_INSN (insn);
4659 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4660 be running after reorg, SEQUENCE rtl is possible. */
4662 static int
4663 contains (const_rtx insn, VEC(int,heap) **vec)
4665 int i, j;
4667 if (NONJUMP_INSN_P (insn)
4668 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4670 int count = 0;
4671 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4672 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4673 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4674 == VEC_index (int, *vec, j))
4675 count++;
4676 return count;
4678 else
4680 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4681 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4682 return 1;
4684 return 0;
4687 int
4688 prologue_epilogue_contains (const_rtx insn)
4690 if (contains (insn, &prologue))
4691 return 1;
4692 if (contains (insn, &epilogue))
4693 return 1;
4694 return 0;
4697 int
4698 sibcall_epilogue_contains (const_rtx insn)
4700 if (sibcall_epilogue)
4701 return contains (insn, &sibcall_epilogue);
4702 return 0;
4705 #ifdef HAVE_return
4706 /* Insert gen_return at the end of block BB. This also means updating
4707 block_for_insn appropriately. */
4709 static void
4710 emit_return_into_block (basic_block bb)
4712 emit_jump_insn_after (gen_return (), BB_END (bb));
4714 #endif /* HAVE_return */
4716 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4717 this into place with notes indicating where the prologue ends and where
4718 the epilogue begins. Update the basic block information when possible. */
4720 static void
4721 thread_prologue_and_epilogue_insns (void)
4723 int inserted = 0;
4724 edge e;
4725 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4726 rtx seq;
4727 #endif
4728 #if defined (HAVE_epilogue) || defined(HAVE_return)
4729 rtx epilogue_end = NULL_RTX;
4730 #endif
4731 edge_iterator ei;
4733 #ifdef HAVE_prologue
4734 if (HAVE_prologue)
4736 start_sequence ();
4737 seq = gen_prologue ();
4738 emit_insn (seq);
4740 /* Insert an explicit USE for the frame pointer
4741 if the profiling is on and the frame pointer is required. */
4742 if (crtl->profile && frame_pointer_needed)
4743 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
4745 /* Retain a map of the prologue insns. */
4746 record_insns (seq, &prologue);
4747 emit_note (NOTE_INSN_PROLOGUE_END);
4749 #ifndef PROFILE_BEFORE_PROLOGUE
4750 /* Ensure that instructions are not moved into the prologue when
4751 profiling is on. The call to the profiling routine can be
4752 emitted within the live range of a call-clobbered register. */
4753 if (crtl->profile)
4754 emit_insn (gen_blockage ());
4755 #endif
4757 seq = get_insns ();
4758 end_sequence ();
4759 set_insn_locators (seq, prologue_locator);
4761 /* Can't deal with multiple successors of the entry block
4762 at the moment. Function should always have at least one
4763 entry point. */
4764 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
4766 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
4767 inserted = 1;
4769 #endif
4771 /* If the exit block has no non-fake predecessors, we don't need
4772 an epilogue. */
4773 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4774 if ((e->flags & EDGE_FAKE) == 0)
4775 break;
4776 if (e == NULL)
4777 goto epilogue_done;
4779 #ifdef HAVE_return
4780 if (optimize && HAVE_return)
4782 /* If we're allowed to generate a simple return instruction,
4783 then by definition we don't need a full epilogue. Examine
4784 the block that falls through to EXIT. If it does not
4785 contain any code, examine its predecessors and try to
4786 emit (conditional) return instructions. */
4788 basic_block last;
4789 rtx label;
4791 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4792 if (e->flags & EDGE_FALLTHRU)
4793 break;
4794 if (e == NULL)
4795 goto epilogue_done;
4796 last = e->src;
4798 /* Verify that there are no active instructions in the last block. */
4799 label = BB_END (last);
4800 while (label && !LABEL_P (label))
4802 if (active_insn_p (label))
4803 break;
4804 label = PREV_INSN (label);
4807 if (BB_HEAD (last) == label && LABEL_P (label))
4809 edge_iterator ei2;
4811 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
4813 basic_block bb = e->src;
4814 rtx jump;
4816 if (bb == ENTRY_BLOCK_PTR)
4818 ei_next (&ei2);
4819 continue;
4822 jump = BB_END (bb);
4823 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
4825 ei_next (&ei2);
4826 continue;
4829 /* If we have an unconditional jump, we can replace that
4830 with a simple return instruction. */
4831 if (simplejump_p (jump))
4833 emit_return_into_block (bb);
4834 delete_insn (jump);
4837 /* If we have a conditional jump, we can try to replace
4838 that with a conditional return instruction. */
4839 else if (condjump_p (jump))
4841 if (! redirect_jump (jump, 0, 0))
4843 ei_next (&ei2);
4844 continue;
4847 /* If this block has only one successor, it both jumps
4848 and falls through to the fallthru block, so we can't
4849 delete the edge. */
4850 if (single_succ_p (bb))
4852 ei_next (&ei2);
4853 continue;
4856 else
4858 ei_next (&ei2);
4859 continue;
4862 /* Fix up the CFG for the successful change we just made. */
4863 redirect_edge_succ (e, EXIT_BLOCK_PTR);
4866 /* Emit a return insn for the exit fallthru block. Whether
4867 this is still reachable will be determined later. */
4869 emit_barrier_after (BB_END (last));
4870 emit_return_into_block (last);
4871 epilogue_end = BB_END (last);
4872 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
4873 goto epilogue_done;
4876 #endif
4877 /* Find the edge that falls through to EXIT. Other edges may exist
4878 due to RETURN instructions, but those don't need epilogues.
4879 There really shouldn't be a mixture -- either all should have
4880 been converted or none, however... */
4882 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4883 if (e->flags & EDGE_FALLTHRU)
4884 break;
4885 if (e == NULL)
4886 goto epilogue_done;
4888 #ifdef HAVE_epilogue
4889 if (HAVE_epilogue)
4891 start_sequence ();
4892 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
4893 seq = gen_epilogue ();
4894 emit_jump_insn (seq);
4896 /* Retain a map of the epilogue insns. */
4897 record_insns (seq, &epilogue);
4898 set_insn_locators (seq, epilogue_locator);
4900 seq = get_insns ();
4901 end_sequence ();
4903 insert_insn_on_edge (seq, e);
4904 inserted = 1;
4906 else
4907 #endif
4909 basic_block cur_bb;
4911 if (! next_active_insn (BB_END (e->src)))
4912 goto epilogue_done;
4913 /* We have a fall-through edge to the exit block, the source is not
4914 at the end of the function, and there will be an assembler epilogue
4915 at the end of the function.
4916 We can't use force_nonfallthru here, because that would try to
4917 use return. Inserting a jump 'by hand' is extremely messy, so
4918 we take advantage of cfg_layout_finalize using
4919 fixup_fallthru_exit_predecessor. */
4920 cfg_layout_initialize (0);
4921 FOR_EACH_BB (cur_bb)
4922 if (cur_bb->index >= NUM_FIXED_BLOCKS
4923 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
4924 cur_bb->aux = cur_bb->next_bb;
4925 cfg_layout_finalize ();
4927 epilogue_done:
4929 if (inserted)
4931 commit_edge_insertions ();
4933 /* The epilogue insns we inserted may cause the exit edge to no longer
4934 be fallthru. */
4935 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4937 if (((e->flags & EDGE_FALLTHRU) != 0)
4938 && returnjump_p (BB_END (e->src)))
4939 e->flags &= ~EDGE_FALLTHRU;
4943 #ifdef HAVE_sibcall_epilogue
4944 /* Emit sibling epilogues before any sibling call sites. */
4945 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
4947 basic_block bb = e->src;
4948 rtx insn = BB_END (bb);
4950 if (!CALL_P (insn)
4951 || ! SIBLING_CALL_P (insn))
4953 ei_next (&ei);
4954 continue;
4957 start_sequence ();
4958 emit_insn (gen_sibcall_epilogue ());
4959 seq = get_insns ();
4960 end_sequence ();
4962 /* Retain a map of the epilogue insns. Used in life analysis to
4963 avoid getting rid of sibcall epilogue insns. Do this before we
4964 actually emit the sequence. */
4965 record_insns (seq, &sibcall_epilogue);
4966 set_insn_locators (seq, epilogue_locator);
4968 emit_insn_before (seq, insn);
4969 ei_next (&ei);
4971 #endif
4973 #ifdef HAVE_epilogue
4974 if (epilogue_end)
4976 rtx insn, next;
4978 /* Similarly, move any line notes that appear after the epilogue.
4979 There is no need, however, to be quite so anal about the existence
4980 of such a note. Also possibly move
4981 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
4982 info generation. */
4983 for (insn = epilogue_end; insn; insn = next)
4985 next = NEXT_INSN (insn);
4986 if (NOTE_P (insn)
4987 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
4988 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
4991 #endif
4993 /* Threading the prologue and epilogue changes the artificial refs
4994 in the entry and exit blocks. */
4995 epilogue_completed = 1;
4996 df_update_entry_exit_and_calls ();
4999 /* Reposition the prologue-end and epilogue-begin notes after instruction
5000 scheduling and delayed branch scheduling. */
5002 void
5003 reposition_prologue_and_epilogue_notes (void)
5005 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5006 rtx insn, last, note;
5007 int len;
5009 if ((len = VEC_length (int, prologue)) > 0)
5011 last = 0, note = 0;
5013 /* Scan from the beginning until we reach the last prologue insn.
5014 We apparently can't depend on basic_block_{head,end} after
5015 reorg has run. */
5016 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5018 if (NOTE_P (insn))
5020 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5021 note = insn;
5023 else if (contains (insn, &prologue))
5025 last = insn;
5026 if (--len == 0)
5027 break;
5031 if (last)
5033 /* Find the prologue-end note if we haven't already, and
5034 move it to just after the last prologue insn. */
5035 if (note == 0)
5037 for (note = last; (note = NEXT_INSN (note));)
5038 if (NOTE_P (note)
5039 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5040 break;
5043 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5044 if (LABEL_P (last))
5045 last = NEXT_INSN (last);
5046 reorder_insns (note, note, last);
5050 if ((len = VEC_length (int, epilogue)) > 0)
5052 last = 0, note = 0;
5054 /* Scan from the end until we reach the first epilogue insn.
5055 We apparently can't depend on basic_block_{head,end} after
5056 reorg has run. */
5057 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5059 if (NOTE_P (insn))
5061 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5062 note = insn;
5064 else if (contains (insn, &epilogue))
5066 last = insn;
5067 if (--len == 0)
5068 break;
5072 if (last)
5074 /* Find the epilogue-begin note if we haven't already, and
5075 move it to just before the first epilogue insn. */
5076 if (note == 0)
5078 for (note = insn; (note = PREV_INSN (note));)
5079 if (NOTE_P (note)
5080 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5081 break;
5084 if (PREV_INSN (last) != note)
5085 reorder_insns (note, note, PREV_INSN (last));
5088 #endif /* HAVE_prologue or HAVE_epilogue */
5091 /* Returns the name of the current function. */
5092 const char *
5093 current_function_name (void)
5095 return lang_hooks.decl_printable_name (cfun->decl, 2);
5098 /* Returns the raw (mangled) name of the current function. */
5099 const char *
5100 current_function_assembler_name (void)
5102 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5106 static unsigned int
5107 rest_of_handle_check_leaf_regs (void)
5109 #ifdef LEAF_REGISTERS
5110 current_function_uses_only_leaf_regs
5111 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5112 #endif
5113 return 0;
5116 /* Insert a TYPE into the used types hash table of CFUN. */
5117 static void
5118 used_types_insert_helper (tree type, struct function *func)
5120 if (type != NULL && func != NULL)
5122 void **slot;
5124 if (func->used_types_hash == NULL)
5125 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5126 htab_eq_pointer, NULL);
5127 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5128 if (*slot == NULL)
5129 *slot = type;
5133 /* Given a type, insert it into the used hash table in cfun. */
5134 void
5135 used_types_insert (tree t)
5137 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5138 t = TREE_TYPE (t);
5139 t = TYPE_MAIN_VARIANT (t);
5140 if (debug_info_level > DINFO_LEVEL_NONE)
5141 used_types_insert_helper (t, cfun);
5144 struct rtl_opt_pass pass_leaf_regs =
5147 RTL_PASS,
5148 NULL, /* name */
5149 NULL, /* gate */
5150 rest_of_handle_check_leaf_regs, /* execute */
5151 NULL, /* sub */
5152 NULL, /* next */
5153 0, /* static_pass_number */
5154 0, /* tv_id */
5155 0, /* properties_required */
5156 0, /* properties_provided */
5157 0, /* properties_destroyed */
5158 0, /* todo_flags_start */
5159 0 /* todo_flags_finish */
5163 static unsigned int
5164 rest_of_handle_thread_prologue_and_epilogue (void)
5166 if (optimize)
5167 cleanup_cfg (CLEANUP_EXPENSIVE);
5168 /* On some machines, the prologue and epilogue code, or parts thereof,
5169 can be represented as RTL. Doing so lets us schedule insns between
5170 it and the rest of the code and also allows delayed branch
5171 scheduling to operate in the epilogue. */
5173 thread_prologue_and_epilogue_insns ();
5174 return 0;
5177 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5180 RTL_PASS,
5181 "pro_and_epilogue", /* name */
5182 NULL, /* gate */
5183 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5184 NULL, /* sub */
5185 NULL, /* next */
5186 0, /* static_pass_number */
5187 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5188 0, /* properties_required */
5189 0, /* properties_provided */
5190 0, /* properties_destroyed */
5191 TODO_verify_flow, /* todo_flags_start */
5192 TODO_dump_func |
5193 TODO_df_verify |
5194 TODO_df_finish | TODO_verify_rtl_sharing |
5195 TODO_ggc_collect /* todo_flags_finish */
5200 /* This mini-pass fixes fall-out from SSA in asm statements that have
5201 in-out constraints. Say you start with
5203 orig = inout;
5204 asm ("": "+mr" (inout));
5205 use (orig);
5207 which is transformed very early to use explicit output and match operands:
5209 orig = inout;
5210 asm ("": "=mr" (inout) : "0" (inout));
5211 use (orig);
5213 Or, after SSA and copyprop,
5215 asm ("": "=mr" (inout_2) : "0" (inout_1));
5216 use (inout_1);
5218 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5219 they represent two separate values, so they will get different pseudo
5220 registers during expansion. Then, since the two operands need to match
5221 per the constraints, but use different pseudo registers, reload can
5222 only register a reload for these operands. But reloads can only be
5223 satisfied by hardregs, not by memory, so we need a register for this
5224 reload, just because we are presented with non-matching operands.
5225 So, even though we allow memory for this operand, no memory can be
5226 used for it, just because the two operands don't match. This can
5227 cause reload failures on register-starved targets.
5229 So it's a symptom of reload not being able to use memory for reloads
5230 or, alternatively it's also a symptom of both operands not coming into
5231 reload as matching (in which case the pseudo could go to memory just
5232 fine, as the alternative allows it, and no reload would be necessary).
5233 We fix the latter problem here, by transforming
5235 asm ("": "=mr" (inout_2) : "0" (inout_1));
5237 back to
5239 inout_2 = inout_1;
5240 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
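/* Editor's note: a minimal source-level example that produces the operand
   pattern discussed above; "use" is a hypothetical external function.  */
#if 0
extern void use (int);

void
inout_example (int inout)
{
  int orig = inout;
  asm ("" : "+mr" (inout));  /* split early into "=mr" (inout) : "0" (inout) */
  use (orig);                /* keeps the original value live across the asm */
}
#endif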
5242 static void
5243 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5245 int i;
5246 bool changed = false;
5247 rtx op = SET_SRC (p_sets[0]);
5248 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5249 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5250 bool *output_matched = alloca (noutputs * sizeof (bool));
5252 memset (output_matched, 0, noutputs * sizeof (bool));
5253 for (i = 0; i < ninputs; i++)
5255 rtx input, output, insns;
5256 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5257 char *end;
5258 int match, j;
5260 match = strtoul (constraint, &end, 10);
5261 if (end == constraint)
5262 continue;
5264 gcc_assert (match < noutputs);
5265 output = SET_DEST (p_sets[match]);
5266 input = RTVEC_ELT (inputs, i);
5267 /* Only do the transformation for pseudos. */
5268 if (! REG_P (output)
5269 || rtx_equal_p (output, input)
5270 || (GET_MODE (input) != VOIDmode
5271 && GET_MODE (input) != GET_MODE (output)))
5272 continue;
5274 /* We can't do anything if the output is also used as input,
5275 as we're going to overwrite it. */
5276 for (j = 0; j < ninputs; j++)
5277 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5278 break;
5279 if (j != ninputs)
5280 continue;
5282 /* Avoid changing the same input several times. For
5283 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5284 only change 'in' once (to out1), rather than changing it
5285 first to out1 and afterwards to out2. */
5286 if (i > 0)
5288 for (j = 0; j < noutputs; j++)
5289 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5290 break;
5291 if (j != noutputs)
5292 continue;
5294 output_matched[match] = true;
5296 start_sequence ();
5297 emit_move_insn (output, input);
5298 insns = get_insns ();
5299 end_sequence ();
5300 emit_insn_before (insns, insn);
5302 /* Now replace all mentions of the input with output. We can't
5303 just replace the occurrence in inputs[i], as the register might
5304 also be used in some other input (or even in an address of an
5305 output), which would mean possibly increasing the number of
5306 inputs by one (namely 'output' in addition), which might pose
5307 too complicated a problem for reload to solve. E.g. this situation:
5309 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5311 Here 'input' is used in two occurrences as input (once for the
5312 input operand, once for the address in the second output operand).
5313 If we replaced only the occurrence of the input operand (to
5314 make the match), we would be left with this:
5316 output = input
5317 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5319 Now we suddenly have two different input values (containing the same
5320 value, but different pseudos) where we formerly had only one.
5321 With more complicated asms this might lead to reload failures
5322 which wouldn't have happened without this pass. So, iterate over
5323 all operands and replace all occurrences of the register used. */
5324 for (j = 0; j < noutputs; j++)
5325 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5326 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5327 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5328 input, output);
5329 for (j = 0; j < ninputs; j++)
5330 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5331 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5332 input, output);
5334 changed = true;
5337 if (changed)
5338 df_insn_rescan (insn);
5341 static unsigned
5342 rest_of_match_asm_constraints (void)
5344 basic_block bb;
5345 rtx insn, pat, *p_sets;
5346 int noutputs;
5348 if (!crtl->has_asm_statement)
5349 return 0;
5351 df_set_flags (DF_DEFER_INSN_RESCAN);
5352 FOR_EACH_BB (bb)
5354 FOR_BB_INSNS (bb, insn)
5356 if (!INSN_P (insn))
5357 continue;
5359 pat = PATTERN (insn);
5360 if (GET_CODE (pat) == PARALLEL)
5361 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5362 else if (GET_CODE (pat) == SET)
5363 p_sets = &PATTERN (insn), noutputs = 1;
5364 else
5365 continue;
5367 if (GET_CODE (*p_sets) == SET
5368 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5369 match_asm_constraints_1 (insn, p_sets, noutputs);
5373 return TODO_df_finish;
5376 struct rtl_opt_pass pass_match_asm_constraints =
5379 RTL_PASS,
5380 "asmcons", /* name */
5381 NULL, /* gate */
5382 rest_of_match_asm_constraints, /* execute */
5383 NULL, /* sub */
5384 NULL, /* next */
5385 0, /* static_pass_number */
5386 0, /* tv_id */
5387 0, /* properties_required */
5388 0, /* properties_provided */
5389 0, /* properties_destroyed */
5390 0, /* todo_flags_start */
5391 TODO_dump_func /* todo_flags_finish */
5396 #include "gt-function.h"