[official-gcc.git] / gcc / function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register. */
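/* A rough sketch of the flow described above (illustrative only; the real
   callers live in the tree-to-RTL expanders):

     expand_function_start (fndecl);     set up the return value, static chain, etc.
     ... expand the function body, calling e.g.
         assign_stack_local (mode, size, align) for local frame slots ...
     expand_function_end ();             emit the return sequence

   assign_stack_local returns a MEM rtx addressing the newly allocated
   frame slot.  */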
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "libfuncs.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "basic-block.h"
55 #include "toplev.h"
56 #include "hashtab.h"
57 #include "ggc.h"
58 #include "tm_p.h"
59 #include "integrate.h"
60 #include "langhooks.h"
61 #include "target.h"
62 #include "cfglayout.h"
63 #include "tree-gimple.h"
64 #include "tree-pass.h"
65 #include "predict.h"
66 #include "vecprim.h"
67 #include "cgraph.h"
69 #ifndef LOCAL_ALIGNMENT
70 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
71 #endif
73 #ifndef STACK_ALIGNMENT_NEEDED
74 #define STACK_ALIGNMENT_NEEDED 1
75 #endif
77 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
79 /* Some systems use __main in a way incompatible with its use in gcc; in these
80 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
81 give the same symbol without quotes for an alternative entry point. You
82 must define both, or neither. */
83 #ifndef NAME__MAIN
84 #define NAME__MAIN "__main"
85 #endif
87 /* Round a value down to the largest multiple of the required alignment
88 that does not exceed it. Avoid using division in case the value is
89 negative. Assume the alignment is a power of two. */
90 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
92 /* Similar, but round to the next highest integer that meets the
93 alignment. */
94 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
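/* A few worked values, assuming two's complement arithmetic:
     FLOOR_ROUND (-3, 8) == -8     CEIL_ROUND (-3, 8) ==  0
     FLOOR_ROUND (13, 8) ==  8     CEIL_ROUND (13, 8) == 16
   A division-based form would mis-round the negative case, since C
   division truncates toward zero.  */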
96 /* Nonzero if the function being compiled doesn't contain any calls
97 (ignoring the prologue and epilogue). This is set prior to
98 local register allocation and is valid for the remaining
99 compiler passes. */
100 int current_function_is_leaf;
102 /* Nonzero if the function being compiled doesn't modify the stack pointer
103 (ignoring the prologue and epilogue). This is only valid after
104 life_analysis has run. */
105 int current_function_sp_is_unchanging;
107 /* Nonzero if the function being compiled is a leaf function which only
108 uses leaf registers. This is valid after reload (specifically after
109 sched2) and is useful only if the port defines LEAF_REGISTERS. */
110 int current_function_uses_only_leaf_regs;
112 /* Nonzero once virtual register instantiation has been done.
113 assign_stack_local uses frame_pointer_rtx when this is nonzero.
114 calls.c:emit_library_call_value_1 uses it to set up
115 post-instantiation libcalls. */
116 int virtuals_instantiated;
118 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
119 static GTY(()) int funcdef_no;
121 /* These variables hold pointers to functions to create and destroy
122 target specific, per-function data structures. */
123 struct machine_function * (*init_machine_status) (void);
125 /* The currently compiled function. */
126 struct function *cfun = 0;
128 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
129 static VEC(int,heap) *prologue;
130 static VEC(int,heap) *epilogue;
132 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
133 in this function. */
134 static VEC(int,heap) *sibcall_epilogue;
136 /* In order to evaluate some expressions, such as function calls returning
137 structures in memory, we need to temporarily allocate stack locations.
138 We record each allocated temporary in the following structure.
140 Associated with each temporary slot is a nesting level. When we pop up
141 one level, all temporaries associated with the previous level are freed.
142 Normally, all temporaries are freed after the execution of the statement
143 in which they were created. However, if we are inside a ({...}) grouping,
144 the result may be in a temporary and hence must be preserved. If the
145 result could be in a temporary, we preserve it if we can determine which
146 one it is in. If we cannot determine which temporary may contain the
147 result, all temporaries are preserved. A temporary is preserved by
148 pretending it was allocated at the previous nesting level.
150 Automatic variables are also assigned temporary slots, at the nesting
151 level where they are defined. They are marked as "kept" so that
152 free_temp_slots will not free them. */
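/* A minimal usage sketch of the nesting-level machinery described above
   (the real call sites are spread through the expanders; names and order
   here are illustrative only):

     push_temp_slots ();                       enter a new nesting level
     tmp = assign_stack_temp (mode, size, 0);  grab (or reuse) a slot
     ...
     preserve_temp_slots (result);             keep slots RESULT may live in
     free_temp_slots ();                       release the remaining ones
     pop_temp_slots ();                        leave the level  */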
154 struct temp_slot GTY(())
156 /* Points to next temporary slot. */
157 struct temp_slot *next;
158 /* Points to previous temporary slot. */
159 struct temp_slot *prev;
161 /* The rtx used to reference the slot. */
162 rtx slot;
163 /* The rtx used to represent the address if not the address of the
164 slot above. May be an EXPR_LIST if multiple addresses exist. */
165 rtx address;
166 /* The alignment (in bits) of the slot. */
167 unsigned int align;
168 /* The size, in units, of the slot. */
169 HOST_WIDE_INT size;
170 /* The type of the object in the slot, or zero if it doesn't correspond
171 to a type. We use this to determine whether a slot can be reused.
172 It can be reused if objects of the type of the new slot will always
173 conflict with objects of the type of the old slot. */
174 tree type;
175 /* Nonzero if this temporary is currently in use. */
176 char in_use;
177 /* Nonzero if this temporary has its address taken. */
178 char addr_taken;
179 /* Nesting level at which this slot is being used. */
180 int level;
181 /* Nonzero if this should survive a call to free_temp_slots. */
182 int keep;
183 /* The offset of the slot from the frame_pointer, including extra space
184 for alignment. This info is for combine_temp_slots. */
185 HOST_WIDE_INT base_offset;
186 /* The size of the slot, including extra space for alignment. This
187 info is for combine_temp_slots. */
188 HOST_WIDE_INT full_size;
191 /* Forward declarations. */
193 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
194 struct function *);
195 static struct temp_slot *find_temp_slot_from_address (rtx);
196 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
197 static void pad_below (struct args_size *, enum machine_mode, tree);
198 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
199 static int all_blocks (tree, tree *);
200 static tree *get_block_vector (tree, int *);
201 extern tree debug_find_var_in_block_tree (tree, tree);
202 /* We always define `record_insns' even if it's not used so that we
203 can always export `prologue_epilogue_contains'. */
204 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
205 static int contains (rtx, VEC(int,heap) **);
206 #ifdef HAVE_return
207 static void emit_return_into_block (basic_block);
208 #endif
209 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
210 static rtx keep_stack_depressed (rtx);
211 #endif
212 static void prepare_function_start (tree);
213 static void do_clobber_return_reg (rtx, void *);
214 static void do_use_return_reg (rtx, void *);
215 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
217 /* Pointer to chain of `struct function' for containing functions. */
218 struct function *outer_function_chain;
220 /* Given a function decl for a containing function,
221 return the `struct function' for it. */
223 struct function *
224 find_function_data (tree decl)
226 struct function *p;
228 for (p = outer_function_chain; p; p = p->outer)
229 if (p->decl == decl)
230 return p;
232 gcc_unreachable ();
235 /* Save the current context for compilation of a nested function.
236 This is called from language-specific code. The caller should use
237 the enter_nested langhook to save any language-specific state,
238 since this function knows only about language-independent
239 variables. */
241 void
242 push_function_context_to (tree context ATTRIBUTE_UNUSED)
244 struct function *p;
246 if (cfun == 0)
247 init_dummy_function_start ();
248 p = cfun;
250 p->outer = outer_function_chain;
251 outer_function_chain = p;
253 lang_hooks.function.enter_nested (p);
255 cfun = 0;
258 void
259 push_function_context (void)
261 push_function_context_to (current_function_decl);
264 /* Restore the last saved context, at the end of a nested function.
265 This function is called from language-specific code. */
267 void
268 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
270 struct function *p = outer_function_chain;
272 cfun = p;
273 outer_function_chain = p->outer;
275 current_function_decl = p->decl;
277 lang_hooks.function.leave_nested (p);
279 /* Reset variables that have known state during rtx generation. */
280 virtuals_instantiated = 0;
281 generating_concat_p = 1;
284 void
285 pop_function_context (void)
287 pop_function_context_from (current_function_decl);
290 /* Clear out all parts of the state in F that can safely be discarded
291 after the function has been parsed, but not compiled, to let
292 garbage collection reclaim the memory. */
294 void
295 free_after_parsing (struct function *f)
297 /* f->expr->forced_labels is used by code generation. */
298 /* f->emit->regno_reg_rtx is used by code generation. */
299 /* f->varasm is used by code generation. */
300 /* f->eh->eh_return_stub_label is used by code generation. */
302 lang_hooks.function.final (f);
305 /* Clear out all parts of the state in F that can safely be discarded
306 after the function has been compiled, to let garbage collection
307 reclaim the memory. */
309 void
310 free_after_compilation (struct function *f)
312 VEC_free (int, heap, prologue);
313 VEC_free (int, heap, epilogue);
314 VEC_free (int, heap, sibcall_epilogue);
316 f->eh = NULL;
317 f->expr = NULL;
318 f->emit = NULL;
319 f->varasm = NULL;
320 f->machine = NULL;
321 f->cfg = NULL;
323 f->x_avail_temp_slots = NULL;
324 f->x_used_temp_slots = NULL;
325 f->arg_offset_rtx = NULL;
326 f->return_rtx = NULL;
327 f->internal_arg_pointer = NULL;
328 f->x_nonlocal_goto_handler_labels = NULL;
329 f->x_return_label = NULL;
330 f->x_naked_return_label = NULL;
331 f->x_stack_slot_list = NULL;
332 f->x_stack_check_probe_note = NULL;
333 f->x_arg_pointer_save_area = NULL;
334 f->x_parm_birth_insn = NULL;
335 f->epilogue_delay_list = NULL;
338 /* Allocate fixed slots in the stack frame of the current function. */
340 /* Return size needed for stack frame based on slots so far allocated in
341 function F.
342 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
343 the caller may have to do that. */
345 static HOST_WIDE_INT
346 get_func_frame_size (struct function *f)
348 if (FRAME_GROWS_DOWNWARD)
349 return -f->x_frame_offset;
350 else
351 return f->x_frame_offset;
354 /* Return size needed for stack frame based on slots so far allocated.
355 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
356 the caller may have to do that. */
358 HOST_WIDE_INT
359 get_frame_size (void)
361 return get_func_frame_size (cfun);
364 /* Issue an error message and return TRUE if frame OFFSET overflows in
365 the signed target pointer arithmetic for function FUNC. Otherwise
366 return FALSE. */
368 bool
369 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
371 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
373 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
374 /* Leave room for the fixed part of the frame. */
375 - 64 * UNITS_PER_WORD)
377 error ("%Jtotal size of local objects too large", func);
378 return TRUE;
381 return FALSE;
384 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
385 with machine mode MODE.
387 ALIGN controls the amount of alignment for the address of the slot:
388 0 means according to MODE,
389 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
390 -2 means use BITS_PER_UNIT,
391 positive specifies alignment boundary in bits.
393 We do not round to stack_boundary here.
395 FUNCTION specifies the function to allocate in. */
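/* Illustrative calls, via the assign_stack_local wrapper below:
     assign_stack_local (DImode, 8, 0)    alignment taken from DImode
                                          (possibly raised by LOCAL_ALIGNMENT),
     assign_stack_local (BLKmode, n, -1)  BIGGEST_ALIGNMENT, N rounded up,
     assign_stack_local (BLKmode, n, -2)  byte-aligned,
     assign_stack_local (SImode, 4, 64)   a 64-bit-aligned word.  */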
397 static rtx
398 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
399 struct function *function)
401 rtx x, addr;
402 int bigend_correction = 0;
403 unsigned int alignment;
404 int frame_off, frame_alignment, frame_phase;
406 if (align == 0)
408 tree type;
410 if (mode == BLKmode)
411 alignment = BIGGEST_ALIGNMENT;
412 else
413 alignment = GET_MODE_ALIGNMENT (mode);
415 /* Allow the target to (possibly) increase the alignment of this
416 stack slot. */
417 type = lang_hooks.types.type_for_mode (mode, 0);
418 if (type)
419 alignment = LOCAL_ALIGNMENT (type, alignment);
421 alignment /= BITS_PER_UNIT;
423 else if (align == -1)
425 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
426 size = CEIL_ROUND (size, alignment);
428 else if (align == -2)
429 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
430 else
431 alignment = align / BITS_PER_UNIT;
433 if (FRAME_GROWS_DOWNWARD)
434 function->x_frame_offset -= size;
436 /* Ignore any alignment request beyond what PREFERRED_STACK_BOUNDARY can provide. */
437 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
438 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
440 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
441 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
443 /* Calculate how many bytes the start of local variables is off from
444 stack alignment. */
445 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
446 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
447 frame_phase = frame_off ? frame_alignment - frame_off : 0;
449 /* Round the frame offset to the specified alignment. The default is
450 to always honor requests to align the stack but a port may choose to
451 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
452 if (STACK_ALIGNMENT_NEEDED
453 || mode != BLKmode
454 || size != 0)
456 /* We must be careful here, since FRAME_OFFSET might be negative and
457 division with a negative dividend isn't as well defined as we might
458 like. So we instead assume that ALIGNMENT is a power of two and
459 use logical operations which are unambiguous. */
460 if (FRAME_GROWS_DOWNWARD)
461 function->x_frame_offset
462 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
463 (unsigned HOST_WIDE_INT) alignment)
464 + frame_phase);
465 else
466 function->x_frame_offset
467 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
468 (unsigned HOST_WIDE_INT) alignment)
469 + frame_phase);
472 /* On a big-endian machine, if we are allocating more space than we will use,
473 use the least significant bytes of those that are allocated. */
474 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
475 bigend_correction = size - GET_MODE_SIZE (mode);
477 /* If we have already instantiated virtual registers, return the actual
478 address relative to the frame pointer. */
479 if (function == cfun && virtuals_instantiated)
480 addr = plus_constant (frame_pointer_rtx,
481 trunc_int_for_mode
482 (frame_offset + bigend_correction
483 + STARTING_FRAME_OFFSET, Pmode));
484 else
485 addr = plus_constant (virtual_stack_vars_rtx,
486 trunc_int_for_mode
487 (function->x_frame_offset + bigend_correction,
488 Pmode));
490 if (!FRAME_GROWS_DOWNWARD)
491 function->x_frame_offset += size;
493 x = gen_rtx_MEM (mode, addr);
494 MEM_NOTRAP_P (x) = 1;
496 function->x_stack_slot_list
497 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
499 if (frame_offset_overflow (function->x_frame_offset, function->decl))
500 function->x_frame_offset = 0;
502 return x;
505 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
506 current function. */
508 rtx
509 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
511 return assign_stack_local_1 (mode, size, align, cfun);
515 /* Removes temporary slot TEMP from LIST. */
517 static void
518 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
520 if (temp->next)
521 temp->next->prev = temp->prev;
522 if (temp->prev)
523 temp->prev->next = temp->next;
524 else
525 *list = temp->next;
527 temp->prev = temp->next = NULL;
530 /* Inserts temporary slot TEMP to LIST. */
532 static void
533 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
535 temp->next = *list;
536 if (*list)
537 (*list)->prev = temp;
538 temp->prev = NULL;
539 *list = temp;
542 /* Returns the list of used temp slots at LEVEL. */
544 static struct temp_slot **
545 temp_slots_at_level (int level)
547 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
549 size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
550 temp_slot_p *p;
552 VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
553 p = VEC_address (temp_slot_p, used_temp_slots);
554 memset (&p[old_length], 0,
555 sizeof (temp_slot_p) * (level + 1 - old_length));
558 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
561 /* Returns the maximal temporary slot level. */
563 static int
564 max_slot_level (void)
566 if (!used_temp_slots)
567 return -1;
569 return VEC_length (temp_slot_p, used_temp_slots) - 1;
572 /* Moves temporary slot TEMP to LEVEL. */
574 static void
575 move_slot_to_level (struct temp_slot *temp, int level)
577 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
578 insert_slot_to_list (temp, temp_slots_at_level (level));
579 temp->level = level;
582 /* Make temporary slot TEMP available. */
584 static void
585 make_slot_available (struct temp_slot *temp)
587 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
588 insert_slot_to_list (temp, &avail_temp_slots);
589 temp->in_use = 0;
590 temp->level = -1;
593 /* Allocate a temporary stack slot and record it for possible later
594 reuse.
596 MODE is the machine mode to be given to the returned rtx.
598 SIZE is the size in units of the space required. We do no rounding here
599 since assign_stack_local will do any required rounding.
601 KEEP is 1 if this slot is to be retained after a call to
602 free_temp_slots. Automatic variables for a block are allocated
603 with this flag. KEEP values of 2 or 3 were needed respectively
604 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
605 or for SAVE_EXPRs, but they are now unused.
607 TYPE is the type that will be used for the stack slot. */
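/* For instance, two requests such as
     assign_stack_temp_for_type (SImode, 4, 0, type);
   separated by free_temp_slots () may hand back the same frame slot, but
   only when objects_must_conflict_p says objects of the two types always
   conflict, so the reuse cannot break type-based alias analysis.  */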
609 rtx
610 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
611 int keep, tree type)
613 unsigned int align;
614 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
615 rtx slot;
617 /* If SIZE is -1 it means that somebody tried to allocate a temporary
618 of a variable size. */
619 gcc_assert (size != -1);
621 /* These are now unused. */
622 gcc_assert (keep <= 1);
624 if (mode == BLKmode)
625 align = BIGGEST_ALIGNMENT;
626 else
627 align = GET_MODE_ALIGNMENT (mode);
629 if (! type)
630 type = lang_hooks.types.type_for_mode (mode, 0);
632 if (type)
633 align = LOCAL_ALIGNMENT (type, align);
635 /* Try to find an available, already-allocated temporary of the proper
636 mode which meets the size and alignment requirements. Choose the
637 smallest one with the closest alignment.
639 If assign_stack_temp is called outside of the tree->rtl expansion,
640 we cannot reuse the stack slots (that may still refer to
641 VIRTUAL_STACK_VARS_REGNUM). */
642 if (!virtuals_instantiated)
644 for (p = avail_temp_slots; p; p = p->next)
646 if (p->align >= align && p->size >= size
647 && GET_MODE (p->slot) == mode
648 && objects_must_conflict_p (p->type, type)
649 && (best_p == 0 || best_p->size > p->size
650 || (best_p->size == p->size && best_p->align > p->align)))
652 if (p->align == align && p->size == size)
654 selected = p;
655 cut_slot_from_list (selected, &avail_temp_slots);
656 best_p = 0;
657 break;
659 best_p = p;
664 /* Make our best, if any, the one to use. */
665 if (best_p)
667 selected = best_p;
668 cut_slot_from_list (selected, &avail_temp_slots);
670 /* If there are enough aligned bytes left over, make them into a new
671 temp_slot so that the extra bytes don't get wasted. Do this only
672 for BLKmode slots, so that we can be sure of the alignment. */
673 if (GET_MODE (best_p->slot) == BLKmode)
675 int alignment = best_p->align / BITS_PER_UNIT;
676 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
678 if (best_p->size - rounded_size >= alignment)
680 p = ggc_alloc (sizeof (struct temp_slot));
681 p->in_use = p->addr_taken = 0;
682 p->size = best_p->size - rounded_size;
683 p->base_offset = best_p->base_offset + rounded_size;
684 p->full_size = best_p->full_size - rounded_size;
685 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
686 p->align = best_p->align;
687 p->address = 0;
688 p->type = best_p->type;
689 insert_slot_to_list (p, &avail_temp_slots);
691 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
692 stack_slot_list);
694 best_p->size = rounded_size;
695 best_p->full_size = rounded_size;
700 /* If we still didn't find one, make a new temporary. */
701 if (selected == 0)
703 HOST_WIDE_INT frame_offset_old = frame_offset;
705 p = ggc_alloc (sizeof (struct temp_slot));
707 /* We are passing an explicit alignment request to assign_stack_local.
708 One side effect of that is assign_stack_local will not round SIZE
709 to ensure the frame offset remains suitably aligned.
711 So for requests which depended on the rounding of SIZE, we go ahead
712 and round it now. We also make sure ALIGNMENT is at least
713 BIGGEST_ALIGNMENT. */
714 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
715 p->slot = assign_stack_local (mode,
716 (mode == BLKmode
717 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
718 : size),
719 align);
721 p->align = align;
723 /* The following slot size computation is necessary because we don't
724 know the actual size of the temporary slot until assign_stack_local
725 has performed all the frame alignment and size rounding for the
726 requested temporary. Note that extra space added for alignment
727 can be either above or below this stack slot depending on which
728 way the frame grows. We include the extra space if and only if it
729 is above this slot. */
730 if (FRAME_GROWS_DOWNWARD)
731 p->size = frame_offset_old - frame_offset;
732 else
733 p->size = size;
735 /* Now define the fields used by combine_temp_slots. */
736 if (FRAME_GROWS_DOWNWARD)
738 p->base_offset = frame_offset;
739 p->full_size = frame_offset_old - frame_offset;
741 else
743 p->base_offset = frame_offset_old;
744 p->full_size = frame_offset - frame_offset_old;
746 p->address = 0;
748 selected = p;
751 p = selected;
752 p->in_use = 1;
753 p->addr_taken = 0;
754 p->type = type;
755 p->level = temp_slot_level;
756 p->keep = keep;
758 pp = temp_slots_at_level (p->level);
759 insert_slot_to_list (p, pp);
761 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
762 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
763 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
765 /* If we know the alias set for the memory that will be used, use
766 it. If there's no TYPE, then we don't know anything about the
767 alias set for the memory. */
768 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
769 set_mem_align (slot, align);
771 /* If a type is specified, set the relevant flags. */
772 if (type != 0)
774 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
775 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
777 MEM_NOTRAP_P (slot) = 1;
779 return slot;
782 /* Allocate a temporary stack slot and record it for possible later
783 reuse. The first three arguments are the same as in the preceding function. */
785 rtx
786 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
788 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
791 /* Assign a temporary.
792 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
793 and so the decl should be used in error messages. In either case, we
794 allocate storage of the given type.
795 KEEP is as for assign_stack_temp.
796 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
797 it is 0 if a register is OK.
798 DONT_PROMOTE is 1 if we should not promote values in register
799 to wider modes. */
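/* E.g. (illustrative): for a scalar such as integer_type_node,
     assign_temp (integer_type_node, 0, 0, 0)
   normally yields a pseudo REG (possibly promoted per PROMOTE_MODE), while
   passing MEMORY_REQUIRED == 1, or any BLKmode type, yields an addressable
   stack MEM via assign_stack_temp_for_type.  */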
801 rtx
802 assign_temp (tree type_or_decl, int keep, int memory_required,
803 int dont_promote ATTRIBUTE_UNUSED)
805 tree type, decl;
806 enum machine_mode mode;
807 #ifdef PROMOTE_MODE
808 int unsignedp;
809 #endif
811 if (DECL_P (type_or_decl))
812 decl = type_or_decl, type = TREE_TYPE (decl);
813 else
814 decl = NULL, type = type_or_decl;
816 mode = TYPE_MODE (type);
817 #ifdef PROMOTE_MODE
818 unsignedp = TYPE_UNSIGNED (type);
819 #endif
821 if (mode == BLKmode || memory_required)
823 HOST_WIDE_INT size = int_size_in_bytes (type);
824 rtx tmp;
826 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
827 problems with allocating the stack space. */
828 if (size == 0)
829 size = 1;
831 /* Unfortunately, we don't yet know how to allocate variable-sized
832 temporaries. However, sometimes we can find a fixed upper limit on
833 the size, so try that instead. */
834 else if (size == -1)
835 size = max_int_size_in_bytes (type);
837 /* The size of the temporary may be too large to fit into an integer. */
838 /* ??? Not sure this should happen except for user silliness, so limit
839 this to things that aren't compiler-generated temporaries. The
840 rest of the time we'll die in assign_stack_temp_for_type. */
841 if (decl && size == -1
842 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
844 error ("size of variable %q+D is too large", decl);
845 size = 1;
848 tmp = assign_stack_temp_for_type (mode, size, keep, type);
849 return tmp;
852 #ifdef PROMOTE_MODE
853 if (! dont_promote)
854 mode = promote_mode (type, mode, &unsignedp, 0);
855 #endif
857 return gen_reg_rtx (mode);
860 /* Combine temporary stack slots which are adjacent on the stack.
862 This allows for better use of already allocated stack space. This is only
863 done for BLKmode slots because we can be sure that we won't have alignment
864 problems in this case. */
866 static void
867 combine_temp_slots (void)
869 struct temp_slot *p, *q, *next, *next_q;
870 int num_slots;
872 /* We can't combine slots, because the information about which slot
873 is in which alias set will be lost. */
874 if (flag_strict_aliasing)
875 return;
877 /* If there are a lot of temp slots, don't do anything unless
878 high levels of optimization are enabled. */
879 if (! flag_expensive_optimizations)
880 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
881 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
882 return;
884 for (p = avail_temp_slots; p; p = next)
886 int delete_p = 0;
888 next = p->next;
890 if (GET_MODE (p->slot) != BLKmode)
891 continue;
893 for (q = p->next; q; q = next_q)
895 int delete_q = 0;
897 next_q = q->next;
899 if (GET_MODE (q->slot) != BLKmode)
900 continue;
902 if (p->base_offset + p->full_size == q->base_offset)
904 /* Q comes after P; combine Q into P. */
905 p->size += q->size;
906 p->full_size += q->full_size;
907 delete_q = 1;
909 else if (q->base_offset + q->full_size == p->base_offset)
911 /* P comes after Q; combine P into Q. */
912 q->size += p->size;
913 q->full_size += p->full_size;
914 delete_p = 1;
915 break;
917 if (delete_q)
918 cut_slot_from_list (q, &avail_temp_slots);
921 /* Either delete P or advance past it. */
922 if (delete_p)
923 cut_slot_from_list (p, &avail_temp_slots);
927 /* Find the temp slot corresponding to the object at address X. */
929 static struct temp_slot *
930 find_temp_slot_from_address (rtx x)
932 struct temp_slot *p;
933 rtx next;
934 int i;
936 for (i = max_slot_level (); i >= 0; i--)
937 for (p = *temp_slots_at_level (i); p; p = p->next)
939 if (XEXP (p->slot, 0) == x
940 || p->address == x
941 || (GET_CODE (x) == PLUS
942 && XEXP (x, 0) == virtual_stack_vars_rtx
943 && GET_CODE (XEXP (x, 1)) == CONST_INT
944 && INTVAL (XEXP (x, 1)) >= p->base_offset
945 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
946 return p;
948 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
949 for (next = p->address; next; next = XEXP (next, 1))
950 if (XEXP (next, 0) == x)
951 return p;
954 /* If we have a sum involving a register, see if it points to a temp
955 slot. */
956 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
957 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
958 return p;
959 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
960 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
961 return p;
963 return 0;
966 /* Indicate that NEW is an alternate way of referring to the temp slot
967 that previously was known by OLD. */
969 void
970 update_temp_slot_address (rtx old, rtx new)
972 struct temp_slot *p;
974 if (rtx_equal_p (old, new))
975 return;
977 p = find_temp_slot_from_address (old);
979 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
980 is a register, see if one operand of the PLUS is a temporary
981 location. If so, NEW points into it. Otherwise, if both OLD and
982 NEW are a PLUS and there is a register in common between them,
983 try a recursive call on those values. */
984 if (p == 0)
986 if (GET_CODE (old) != PLUS)
987 return;
989 if (REG_P (new))
991 update_temp_slot_address (XEXP (old, 0), new);
992 update_temp_slot_address (XEXP (old, 1), new);
993 return;
995 else if (GET_CODE (new) != PLUS)
996 return;
998 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
999 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1000 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1001 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1002 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1003 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1004 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1005 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1007 return;
1010 /* Otherwise add an alias for the temp's address. */
1011 else if (p->address == 0)
1012 p->address = new;
1013 else
1015 if (GET_CODE (p->address) != EXPR_LIST)
1016 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1018 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1022 /* If X could be a reference to a temporary slot, mark the fact that its
1023 address was taken. */
1025 void
1026 mark_temp_addr_taken (rtx x)
1028 struct temp_slot *p;
1030 if (x == 0)
1031 return;
1033 /* If X is not in memory or is at a constant address, it cannot be in
1034 a temporary slot. */
1035 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1036 return;
1038 p = find_temp_slot_from_address (XEXP (x, 0));
1039 if (p != 0)
1040 p->addr_taken = 1;
1043 /* If X could be a reference to a temporary slot, mark that slot as
1044 belonging to one level higher than the current level. If X
1045 matched one of our slots, just mark that one. Otherwise, we can't
1046 easily predict which it is, so upgrade all of them. Kept slots
1047 need not be touched.
1049 This is called when an ({...}) construct occurs and a statement
1050 returns a value in memory. */
1052 void
1053 preserve_temp_slots (rtx x)
1055 struct temp_slot *p = 0, *next;
1057 /* If there is no result, we still might have some objects whose addresses
1058 were taken, so we need to make sure they stay around. */
1059 if (x == 0)
1061 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1063 next = p->next;
1065 if (p->addr_taken)
1066 move_slot_to_level (p, temp_slot_level - 1);
1069 return;
1072 /* If X is a register that is being used as a pointer, see if we have
1073 a temporary slot we know it points to. To be consistent with
1074 the code below, we really should preserve all non-kept slots
1075 if we can't find a match, but that seems to be much too costly. */
1076 if (REG_P (x) && REG_POINTER (x))
1077 p = find_temp_slot_from_address (x);
1079 /* If X is not in memory or is at a constant address, it cannot be in
1080 a temporary slot, but it can contain something whose address was
1081 taken. */
1082 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1084 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1086 next = p->next;
1088 if (p->addr_taken)
1089 move_slot_to_level (p, temp_slot_level - 1);
1092 return;
1095 /* First see if we can find a match. */
1096 if (p == 0)
1097 p = find_temp_slot_from_address (XEXP (x, 0));
1099 if (p != 0)
1101 /* Move everything at our level whose address was taken to our new
1102 level in case we used its address. */
1103 struct temp_slot *q;
1105 if (p->level == temp_slot_level)
1107 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1109 next = q->next;
1111 if (p != q && q->addr_taken)
1112 move_slot_to_level (q, temp_slot_level - 1);
1115 move_slot_to_level (p, temp_slot_level - 1);
1116 p->addr_taken = 0;
1118 return;
1121 /* Otherwise, preserve all non-kept slots at this level. */
1122 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1124 next = p->next;
1126 if (!p->keep)
1127 move_slot_to_level (p, temp_slot_level - 1);
1131 /* Free all temporaries used so far. This is normally called at the
1132 end of generating code for a statement. */
1134 void
1135 free_temp_slots (void)
1137 struct temp_slot *p, *next;
1139 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1141 next = p->next;
1143 if (!p->keep)
1144 make_slot_available (p);
1147 combine_temp_slots ();
1150 /* Push deeper into the nesting level for stack temporaries. */
1152 void
1153 push_temp_slots (void)
1155 temp_slot_level++;
1158 /* Pop a temporary nesting level. All slots in use in the current level
1159 are freed. */
1161 void
1162 pop_temp_slots (void)
1164 struct temp_slot *p, *next;
1166 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1168 next = p->next;
1169 make_slot_available (p);
1172 combine_temp_slots ();
1174 temp_slot_level--;
1177 /* Initialize temporary slots. */
1179 void
1180 init_temp_slots (void)
1182 /* We have not allocated any temporaries yet. */
1183 avail_temp_slots = 0;
1184 used_temp_slots = 0;
1185 temp_slot_level = 0;
1188 /* These routines are responsible for converting virtual register references
1189 to the actual hard register references once RTL generation is complete.
1191 The following variables are used for communication between the
1192 routines. They contain the offsets of the virtual registers from their
1193 respective hard registers. */
1195 static int in_arg_offset;
1196 static int var_offset;
1197 static int dynamic_offset;
1198 static int out_arg_offset;
1199 static int cfa_offset;
1201 /* In most machines, the stack pointer register is equivalent to the bottom
1202 of the stack. */
1204 #ifndef STACK_POINTER_OFFSET
1205 #define STACK_POINTER_OFFSET 0
1206 #endif
1208 /* If not defined, pick an appropriate default for the offset of dynamically
1209 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1210 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1212 #ifndef STACK_DYNAMIC_OFFSET
1214 /* The bottom of the stack points to the actual arguments. If
1215 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1216 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1217 stack space for register parameters is not pushed by the caller, but
1218 rather part of the fixed stack areas and hence not included in
1219 `current_function_outgoing_args_size'. Nevertheless, we must allow
1220 for it when allocating stack dynamic objects. */
1222 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1223 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1224 ((ACCUMULATE_OUTGOING_ARGS \
1225 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1226 + (STACK_POINTER_OFFSET)) \
1228 #else
1229 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1230 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1231 + (STACK_POINTER_OFFSET))
1232 #endif
1233 #endif
1236 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1237 is a virtual register, return the equivalent hard register and set the
1238 offset indirectly through the pointer. Otherwise, return 0. */
1240 static rtx
1241 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1243 rtx new;
1244 HOST_WIDE_INT offset;
1246 if (x == virtual_incoming_args_rtx)
1247 new = arg_pointer_rtx, offset = in_arg_offset;
1248 else if (x == virtual_stack_vars_rtx)
1249 new = frame_pointer_rtx, offset = var_offset;
1250 else if (x == virtual_stack_dynamic_rtx)
1251 new = stack_pointer_rtx, offset = dynamic_offset;
1252 else if (x == virtual_outgoing_args_rtx)
1253 new = stack_pointer_rtx, offset = out_arg_offset;
1254 else if (x == virtual_cfa_rtx)
1256 #ifdef FRAME_POINTER_CFA_OFFSET
1257 new = frame_pointer_rtx;
1258 #else
1259 new = arg_pointer_rtx;
1260 #endif
1261 offset = cfa_offset;
1263 else
1264 return NULL_RTX;
1266 *poffset = offset;
1267 return new;
1270 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1271 Instantiate any virtual registers present inside of *LOC. The expression
1272 is simplified, as much as possible, but is not to be considered "valid"
1273 in any sense implied by the target. If any change is made, set CHANGED
1274 to true. */
1276 static int
1277 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1279 HOST_WIDE_INT offset;
1280 bool *changed = (bool *) data;
1281 rtx x, new;
1283 x = *loc;
1284 if (x == 0)
1285 return 0;
1287 switch (GET_CODE (x))
1289 case REG:
1290 new = instantiate_new_reg (x, &offset);
1291 if (new)
1293 *loc = plus_constant (new, offset);
1294 if (changed)
1295 *changed = true;
1297 return -1;
1299 case PLUS:
1300 new = instantiate_new_reg (XEXP (x, 0), &offset);
1301 if (new)
1303 new = plus_constant (new, offset);
1304 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1305 if (changed)
1306 *changed = true;
1307 return -1;
1310 /* FIXME -- from old code */
1311 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1312 we can commute the PLUS and SUBREG because pointers into the
1313 frame are well-behaved. */
1314 break;
1316 default:
1317 break;
1320 return 0;
1323 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1324 matches the predicate for insn CODE operand OPERAND. */
1326 static int
1327 safe_insn_predicate (int code, int operand, rtx x)
1329 const struct insn_operand_data *op_data;
1331 if (code < 0)
1332 return true;
1334 op_data = &insn_data[code].operand[operand];
1335 if (op_data->predicate == NULL)
1336 return true;
1338 return op_data->predicate (x, op_data->mode);
1341 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1342 registers present inside of insn. The result will be a valid insn. */
1344 static void
1345 instantiate_virtual_regs_in_insn (rtx insn)
1347 HOST_WIDE_INT offset;
1348 int insn_code, i;
1349 bool any_change = false;
1350 rtx set, new, x, seq;
1352 /* There are some special cases to be handled first. */
1353 set = single_set (insn);
1354 if (set)
1356 /* We're allowed to assign to a virtual register. This is interpreted
1357 to mean that the underlying register gets assigned the inverse
1358 transformation. This is used, for example, in the handling of
1359 non-local gotos. */
1360 new = instantiate_new_reg (SET_DEST (set), &offset);
1361 if (new)
1363 start_sequence ();
1365 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1366 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1367 GEN_INT (-offset));
1368 x = force_operand (x, new);
1369 if (x != new)
1370 emit_move_insn (new, x);
1372 seq = get_insns ();
1373 end_sequence ();
1375 emit_insn_before (seq, insn);
1376 delete_insn (insn);
1377 return;
1380 /* Handle a straight copy from a virtual register by generating a
1381 new add insn. The difference between this and falling through
1382 to the generic case is avoiding a new pseudo and eliminating a
1383 move insn in the initial rtl stream. */
1384 new = instantiate_new_reg (SET_SRC (set), &offset);
1385 if (new && offset != 0
1386 && REG_P (SET_DEST (set))
1387 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1389 start_sequence ();
1391 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1392 new, GEN_INT (offset), SET_DEST (set),
1393 1, OPTAB_LIB_WIDEN);
1394 if (x != SET_DEST (set))
1395 emit_move_insn (SET_DEST (set), x);
1397 seq = get_insns ();
1398 end_sequence ();
1400 emit_insn_before (seq, insn);
1401 delete_insn (insn);
1402 return;
1405 extract_insn (insn);
1406 insn_code = INSN_CODE (insn);
1408 /* Handle a plus involving a virtual register by determining if the
1409 operands remain valid if they're modified in place. */
1410 if (GET_CODE (SET_SRC (set)) == PLUS
1411 && recog_data.n_operands >= 3
1412 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1413 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1414 && GET_CODE (recog_data.operand[2]) == CONST_INT
1415 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1417 offset += INTVAL (recog_data.operand[2]);
1419 /* If the sum is zero, then replace with a plain move. */
1420 if (offset == 0
1421 && REG_P (SET_DEST (set))
1422 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1424 start_sequence ();
1425 emit_move_insn (SET_DEST (set), new);
1426 seq = get_insns ();
1427 end_sequence ();
1429 emit_insn_before (seq, insn);
1430 delete_insn (insn);
1431 return;
1434 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1436 /* Using validate_change and apply_change_group here leaves
1437 recog_data in an invalid state. Since we know exactly what
1438 we want to check, do those two by hand. */
1439 if (safe_insn_predicate (insn_code, 1, new)
1440 && safe_insn_predicate (insn_code, 2, x))
1442 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1443 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1444 any_change = true;
1446 /* Fall through into the regular operand fixup loop in
1447 order to take care of operands other than 1 and 2. */
1451 else
1453 extract_insn (insn);
1454 insn_code = INSN_CODE (insn);
1457 /* In the general case, we expect virtual registers to appear only in
1458 operands, and then only as either bare registers or inside memories. */
1459 for (i = 0; i < recog_data.n_operands; ++i)
1461 x = recog_data.operand[i];
1462 switch (GET_CODE (x))
1464 case MEM:
1466 rtx addr = XEXP (x, 0);
1467 bool changed = false;
1469 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1470 if (!changed)
1471 continue;
1473 start_sequence ();
1474 x = replace_equiv_address (x, addr);
1475 seq = get_insns ();
1476 end_sequence ();
1477 if (seq)
1478 emit_insn_before (seq, insn);
1480 break;
1482 case REG:
1483 new = instantiate_new_reg (x, &offset);
1484 if (new == NULL)
1485 continue;
1486 if (offset == 0)
1487 x = new;
1488 else
1490 start_sequence ();
1492 /* Careful, special mode predicates may have stuff in
1493 insn_data[insn_code].operand[i].mode that isn't useful
1494 to us for computing a new value. */
1495 /* ??? Recognize address_operand and/or "p" constraints
1496 to see if (plus new offset) is a valid address before we put
1497 this through expand_simple_binop. */
1498 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1499 GEN_INT (offset), NULL_RTX,
1500 1, OPTAB_LIB_WIDEN);
1501 seq = get_insns ();
1502 end_sequence ();
1503 emit_insn_before (seq, insn);
1505 break;
1507 case SUBREG:
1508 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1509 if (new == NULL)
1510 continue;
1511 if (offset != 0)
1513 start_sequence ();
1514 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1515 GEN_INT (offset), NULL_RTX,
1516 1, OPTAB_LIB_WIDEN);
1517 seq = get_insns ();
1518 end_sequence ();
1519 emit_insn_before (seq, insn);
1521 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1522 GET_MODE (new), SUBREG_BYTE (x));
1523 break;
1525 default:
1526 continue;
1529 /* At this point, X contains the new value for the operand.
1530 Validate the new value vs the insn predicate. Note that
1531 asm insns will have insn_code -1 here. */
1532 if (!safe_insn_predicate (insn_code, i, x))
1534 start_sequence ();
1535 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1536 seq = get_insns ();
1537 end_sequence ();
1538 if (seq)
1539 emit_insn_before (seq, insn);
1542 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1543 any_change = true;
1546 if (any_change)
1548 /* Propagate operand changes into the duplicates. */
1549 for (i = 0; i < recog_data.n_dups; ++i)
1550 *recog_data.dup_loc[i]
1551 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1553 /* Force re-recognition of the instruction for validation. */
1554 INSN_CODE (insn) = -1;
1557 if (asm_noperands (PATTERN (insn)) >= 0)
1559 if (!check_asm_operands (PATTERN (insn)))
1561 error_for_asm (insn, "impossible constraint in %<asm%>");
1562 delete_insn (insn);
1565 else
1567 if (recog_memoized (insn) < 0)
1568 fatal_insn_not_found (insn);
1572 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1573 do any instantiation required. */
1575 static void
1576 instantiate_decl (rtx x)
1578 rtx addr;
1580 if (x == 0)
1581 return;
1583 /* If this is a CONCAT, recurse for the pieces. */
1584 if (GET_CODE (x) == CONCAT)
1586 instantiate_decl (XEXP (x, 0));
1587 instantiate_decl (XEXP (x, 1));
1588 return;
1591 /* If this is not a MEM, no need to do anything. Similarly if the
1592 address is a constant or a register that is not a virtual register. */
1593 if (!MEM_P (x))
1594 return;
1596 addr = XEXP (x, 0);
1597 if (CONSTANT_P (addr)
1598 || (REG_P (addr)
1599 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1600 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1601 return;
1603 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1606 /* Helper for instantiate_decls called via walk_tree: Process all decls
1607 in the given DECL_VALUE_EXPR. */
1609 static tree
1610 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1612 tree t = *tp;
1613 if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
1615 *walk_subtrees = 0;
1616 if (DECL_P (t) && DECL_RTL_SET_P (t))
1617 instantiate_decl (DECL_RTL (t));
1619 return NULL;
1622 /* Subroutine of instantiate_decls: Process all decls in the given
1623 BLOCK node and all its subblocks. */
1625 static void
1626 instantiate_decls_1 (tree let)
1628 tree t;
1630 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1632 if (DECL_RTL_SET_P (t))
1633 instantiate_decl (DECL_RTL (t));
1634 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1636 tree v = DECL_VALUE_EXPR (t);
1637 walk_tree (&v, instantiate_expr, NULL, NULL);
1641 /* Process all subblocks. */
1642 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1643 instantiate_decls_1 (t);
1646 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1647 all virtual registers in their DECL_RTL's. */
1649 static void
1650 instantiate_decls (tree fndecl)
1652 tree decl;
1654 /* Process all parameters of the function. */
1655 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1657 instantiate_decl (DECL_RTL (decl));
1658 instantiate_decl (DECL_INCOMING_RTL (decl));
1659 if (DECL_HAS_VALUE_EXPR_P (decl))
1661 tree v = DECL_VALUE_EXPR (decl);
1662 walk_tree (&v, instantiate_expr, NULL, NULL);
1666 /* Now process all variables defined in the function or its subblocks. */
1667 instantiate_decls_1 (DECL_INITIAL (fndecl));
1670 /* Pass through the INSNS of function FNDECL and convert virtual register
1671 references to hard register references. */
1673 static unsigned int
1674 instantiate_virtual_regs (void)
1676 rtx insn;
1678 /* Compute the offsets to use for this function. */
1679 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1680 var_offset = STARTING_FRAME_OFFSET;
1681 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1682 out_arg_offset = STACK_POINTER_OFFSET;
1683 #ifdef FRAME_POINTER_CFA_OFFSET
1684 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1685 #else
1686 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1687 #endif
1689 /* Initialize recognition, indicating that volatile is OK. */
1690 init_recog ();
1692 /* Scan through all the insns, instantiating every virtual register still
1693 present. */
1694 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1695 if (INSN_P (insn))
1697 /* These patterns in the instruction stream can never be recognized.
1698 Fortunately, they shouldn't contain virtual registers either. */
1699 if (GET_CODE (PATTERN (insn)) == USE
1700 || GET_CODE (PATTERN (insn)) == CLOBBER
1701 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1702 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1703 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1704 continue;
1706 instantiate_virtual_regs_in_insn (insn);
1708 if (INSN_DELETED_P (insn))
1709 continue;
1711 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1713 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1714 if (GET_CODE (insn) == CALL_INSN)
1715 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1716 instantiate_virtual_regs_in_rtx, NULL);
1719 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1720 instantiate_decls (current_function_decl);
1722 /* Indicate that, from now on, assign_stack_local should use
1723 frame_pointer_rtx. */
1724 virtuals_instantiated = 1;
1725 return 0;
1728 struct tree_opt_pass pass_instantiate_virtual_regs =
1730 "vregs", /* name */
1731 NULL, /* gate */
1732 instantiate_virtual_regs, /* execute */
1733 NULL, /* sub */
1734 NULL, /* next */
1735 0, /* static_pass_number */
1736 0, /* tv_id */
1737 0, /* properties_required */
1738 0, /* properties_provided */
1739 0, /* properties_destroyed */
1740 0, /* todo_flags_start */
1741 TODO_dump_func, /* todo_flags_finish */
1742 0 /* letter */
1746 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1747 This means a type for which function calls must pass an address to the
1748 function or get an address back from the function.
1749 EXP may be a type node or an expression (whose type is tested). */
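/* For example, a type the target's return_in_memory hook refuses to return
   in registers, or any TREE_ADDRESSABLE type, is "aggregate" in this sense
   and the caller must pass an address for the result; a plain int normally
   is not.  */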
1751 int
1752 aggregate_value_p (tree exp, tree fntype)
1754 int i, regno, nregs;
1755 rtx reg;
1757 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1759 /* DECL node associated with FNTYPE when relevant, which we might need to
1760 check for by-invisible-reference returns, typically for CALL_EXPR input
1761 EXPressions. */
1762 tree fndecl = NULL_TREE;
1764 if (fntype)
1765 switch (TREE_CODE (fntype))
1767 case CALL_EXPR:
1768 fndecl = get_callee_fndecl (fntype);
1769 fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1770 break;
1771 case FUNCTION_DECL:
1772 fndecl = fntype;
1773 fntype = TREE_TYPE (fndecl);
1774 break;
1775 case FUNCTION_TYPE:
1776 case METHOD_TYPE:
1777 break;
1778 case IDENTIFIER_NODE:
1779 fntype = 0;
1780 break;
1781 default:
1782 /* We don't expect other tree codes here. */
1783 gcc_unreachable ();
1786 if (TREE_CODE (type) == VOID_TYPE)
1787 return 0;
1789 /* If the front end has decided that this needs to be passed by
1790 reference, do so. */
1791 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1792 && DECL_BY_REFERENCE (exp))
1793 return 1;
1795 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1796 called function RESULT_DECL, meaning the function returns in memory by
1797 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1798 on the function type, which used to be the way to request such a return
1799 mechanism but might now be causing troubles at gimplification time if
1800 temporaries with the function type need to be created. */
1801 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1802 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1803 return 1;
1805 if (targetm.calls.return_in_memory (type, fntype))
1806 return 1;
1807 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1808 and thus can't be returned in registers. */
1809 if (TREE_ADDRESSABLE (type))
1810 return 1;
1811 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1812 return 1;
1813 /* Make sure we have suitable call-clobbered regs to return
1814 the value in; if not, we must return it in memory. */
1815 reg = hard_function_value (type, 0, fntype, 0);
1817 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1818 it is OK. */
1819 if (!REG_P (reg))
1820 return 0;
1822 regno = REGNO (reg);
1823 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1824 for (i = 0; i < nregs; i++)
1825 if (! call_used_regs[regno + i])
1826 return 1;
1827 return 0;
1830 /* Return true if we should assign DECL a pseudo register; false if it
1831 should live on the local stack. */
1833 bool
1834 use_register_for_decl (tree decl)
1836 /* Honor volatile. */
1837 if (TREE_SIDE_EFFECTS (decl))
1838 return false;
1840 /* Honor addressability. */
1841 if (TREE_ADDRESSABLE (decl))
1842 return false;
1844 /* Only register-like things go in registers. */
1845 if (DECL_MODE (decl) == BLKmode)
1846 return false;
1848 /* If -ffloat-store is specified, don't put explicit float variables
1849 into registers. */
1850 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1851 propagates values across these stores, and it probably shouldn't. */
1852 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1853 return false;
1855 /* If we're not interested in tracking debugging information for
1856 this decl, then we can certainly put it in a register. */
1857 if (DECL_IGNORED_P (decl))
1858 return true;
1860 return (optimize || DECL_REGISTER (decl));
1863 /* Return true if TYPE should be passed by invisible reference. */
1865 bool
1866 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1867 tree type, bool named_arg)
1869 if (type)
1871 /* If this type contains non-trivial constructors, then it is
1872 forbidden for the middle-end to create any new copies. */
1873 if (TREE_ADDRESSABLE (type))
1874 return true;
1876 /* GCC post 3.4 passes *all* variable sized types by reference. */
1877 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1878 return true;
1881 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1884 /* Return true if TYPE, which is passed by reference, should be callee
1885 copied instead of caller copied. */
1887 bool
1888 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1889 tree type, bool named_arg)
1891 if (type && TREE_ADDRESSABLE (type))
1892 return false;
1893 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1896 /* Structures to communicate between the subroutines of assign_parms.
1897 The first holds data persistent across all parameters, the second
1898 is cleared out for each parameter. */
1900 struct assign_parm_data_all
1902 CUMULATIVE_ARGS args_so_far;
1903 struct args_size stack_args_size;
1904 tree function_result_decl;
1905 tree orig_fnargs;
1906 rtx conversion_insns;
1907 HOST_WIDE_INT pretend_args_size;
1908 HOST_WIDE_INT extra_pretend_bytes;
1909 int reg_parm_stack_space;
1912 struct assign_parm_data_one
1914 tree nominal_type;
1915 tree passed_type;
1916 rtx entry_parm;
1917 rtx stack_parm;
1918 enum machine_mode nominal_mode;
1919 enum machine_mode passed_mode;
1920 enum machine_mode promoted_mode;
1921 struct locate_and_pad_arg_data locate;
1922 int partial;
1923 BOOL_BITFIELD named_arg : 1;
1924 BOOL_BITFIELD passed_pointer : 1;
1925 BOOL_BITFIELD on_stack : 1;
1926 BOOL_BITFIELD loaded_in_reg : 1;
1929 /* A subroutine of assign_parms. Initialize ALL. */
1931 static void
1932 assign_parms_initialize_all (struct assign_parm_data_all *all)
1934 tree fntype;
1936 memset (all, 0, sizeof (*all));
1938 fntype = TREE_TYPE (current_function_decl);
1940 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1941 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1942 #else
1943 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1944 current_function_decl, -1);
1945 #endif
1947 #ifdef REG_PARM_STACK_SPACE
1948 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1949 #endif
1952 /* If ARGS contains entries with complex types, split the entry into two
1953 entries of the component type. Return a new list if substitutions are
1954 needed, else the old list. */
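/* An illustrative sketch (not specific to any target): if the target's
   split_complex_arg hook accepts COMPLEX_TYPEs, then for

       void f (_Complex double z);

   the single PARM_DECL for Z is rewritten into two PARM_DECLs of type
   double, one for the real part and one chained after it for the
   imaginary part.  assign_parms_unsplit_complex later recombines the two
   pieces into a CONCAT, or into a stack temporary if Z is addressable.  */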
1956 static tree
1957 split_complex_args (tree args)
1959 tree p;
1961 /* Before allocating memory, check for the common case of no complex. */
1962 for (p = args; p; p = TREE_CHAIN (p))
1964 tree type = TREE_TYPE (p);
1965 if (TREE_CODE (type) == COMPLEX_TYPE
1966 && targetm.calls.split_complex_arg (type))
1967 goto found;
1969 return args;
1971 found:
1972 args = copy_list (args);
1974 for (p = args; p; p = TREE_CHAIN (p))
1976 tree type = TREE_TYPE (p);
1977 if (TREE_CODE (type) == COMPLEX_TYPE
1978 && targetm.calls.split_complex_arg (type))
1980 tree decl;
1981 tree subtype = TREE_TYPE (type);
1982 bool addressable = TREE_ADDRESSABLE (p);
1984 /* Rewrite the PARM_DECL's type with its component. */
1985 TREE_TYPE (p) = subtype;
1986 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1987 DECL_MODE (p) = VOIDmode;
1988 DECL_SIZE (p) = NULL;
1989 DECL_SIZE_UNIT (p) = NULL;
1990 /* If this arg must go in memory, put it in a pseudo here.
1991 We can't allow it to go in memory as per normal parms,
1992 because the usual place might not have the imag part
1993 adjacent to the real part. */
1994 DECL_ARTIFICIAL (p) = addressable;
1995 DECL_IGNORED_P (p) = addressable;
1996 TREE_ADDRESSABLE (p) = 0;
1997 layout_decl (p, 0);
1999 /* Build a second synthetic decl. */
2000 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2001 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2002 DECL_ARTIFICIAL (decl) = addressable;
2003 DECL_IGNORED_P (decl) = addressable;
2004 layout_decl (decl, 0);
2006 /* Splice it in; skip the new decl. */
2007 TREE_CHAIN (decl) = TREE_CHAIN (p);
2008 TREE_CHAIN (p) = decl;
2009 p = decl;
2013 return args;
2016 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2017 the hidden struct return argument, and (ABI willing) complex args.
2018 Return the new parameter list. */
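/* For instance (a hedged illustration with a hypothetical type): for

       struct big f (int x);

   on a target where struct big is returned in memory and no dedicated
   struct-value register exists, the parameter list below effectively
   becomes

       struct big *<retval-ptr>, int x

   with the artificial pointer PARM_DECL recorded in
   all->function_result_decl so assign_parms can later wire up the RTL
   for DECL_RESULT.  */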
2020 static tree
2021 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2023 tree fndecl = current_function_decl;
2024 tree fntype = TREE_TYPE (fndecl);
2025 tree fnargs = DECL_ARGUMENTS (fndecl);
2027 /* If struct value address is treated as the first argument, make it so. */
2028 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2029 && ! current_function_returns_pcc_struct
2030 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2032 tree type = build_pointer_type (TREE_TYPE (fntype));
2033 tree decl;
2035 decl = build_decl (PARM_DECL, NULL_TREE, type);
2036 DECL_ARG_TYPE (decl) = type;
2037 DECL_ARTIFICIAL (decl) = 1;
2038 DECL_IGNORED_P (decl) = 1;
2040 TREE_CHAIN (decl) = fnargs;
2041 fnargs = decl;
2042 all->function_result_decl = decl;
2045 all->orig_fnargs = fnargs;
2047 /* If the target wants to split complex arguments into scalars, do so. */
2048 if (targetm.calls.split_complex_arg)
2049 fnargs = split_complex_args (fnargs);
2051 return fnargs;
2054 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2055 data for the parameter. Incorporate ABI specifics such as pass-by-
2056 reference and type promotion. */
2058 static void
2059 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2060 struct assign_parm_data_one *data)
2062 tree nominal_type, passed_type;
2063 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2065 memset (data, 0, sizeof (*data));
2067 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2068 if (!current_function_stdarg)
2069 data->named_arg = 1; /* No variadic parms. */
2070 else if (TREE_CHAIN (parm))
2071 data->named_arg = 1; /* Not the last non-variadic parm. */
2072 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2073 data->named_arg = 1; /* Only variadic ones are unnamed. */
2074 else
2075 data->named_arg = 0; /* Treat as variadic. */
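/* For example (purely illustrative): in "int f (int a, int b, ...)", A is
   treated as named because another parameter follows it; B, the last fixed
   parameter, is treated as named only if the target's
   strict_argument_naming hook returns true, and otherwise as unnamed,
   i.e. handled like the variadic arguments.  */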
2077 nominal_type = TREE_TYPE (parm);
2078 passed_type = DECL_ARG_TYPE (parm);
2080 /* Look out for errors propagating this far. Also, if the parameter's
2081 type is void then its value doesn't matter. */
2082 if (TREE_TYPE (parm) == error_mark_node
2083 /* This can happen after weird syntax errors
2084 or if an enum type is defined among the parms. */
2085 || TREE_CODE (parm) != PARM_DECL
2086 || passed_type == NULL
2087 || VOID_TYPE_P (nominal_type))
2089 nominal_type = passed_type = void_type_node;
2090 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2091 goto egress;
2094 /* Find mode of arg as it is passed, and mode of arg as it should be
2095 during execution of this function. */
2096 passed_mode = TYPE_MODE (passed_type);
2097 nominal_mode = TYPE_MODE (nominal_type);
2099 /* If the parm is to be passed as a transparent union, use the type of
2100 the first field for the tests below. We have already verified that
2101 the modes are the same. */
2102 if (TREE_CODE (passed_type) == UNION_TYPE
2103 && TYPE_TRANSPARENT_UNION (passed_type))
2104 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2106 /* See if this arg was passed by invisible reference. */
2107 if (pass_by_reference (&all->args_so_far, passed_mode,
2108 passed_type, data->named_arg))
2110 passed_type = nominal_type = build_pointer_type (passed_type);
2111 data->passed_pointer = true;
2112 passed_mode = nominal_mode = Pmode;
2115 /* Find mode as it is passed by the ABI. */
2116 promoted_mode = passed_mode;
2117 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2119 int unsignedp = TYPE_UNSIGNED (passed_type);
2120 promoted_mode = promote_mode (passed_type, promoted_mode,
2121 &unsignedp, 1);
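/* For example (target-dependent, shown only as an illustration): a signed
   char argument on a target that promotes function arguments keeps
   nominal_mode == passed_mode == QImode, while promoted_mode becomes
   SImode, the mode in which the value actually arrives.  */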
2124 egress:
2125 data->nominal_type = nominal_type;
2126 data->passed_type = passed_type;
2127 data->nominal_mode = nominal_mode;
2128 data->passed_mode = passed_mode;
2129 data->promoted_mode = promoted_mode;
2132 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2134 static void
2135 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2136 struct assign_parm_data_one *data, bool no_rtl)
2138 int varargs_pretend_bytes = 0;
2140 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2141 data->promoted_mode,
2142 data->passed_type,
2143 &varargs_pretend_bytes, no_rtl);
2145 /* If the back-end has requested extra stack space, record how much is
2146 needed. Do not change pretend_args_size otherwise since it may be
2147 nonzero from an earlier partial argument. */
2148 if (varargs_pretend_bytes > 0)
2149 all->pretend_args_size = varargs_pretend_bytes;
2152 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2153 the incoming location of the current parameter. */
2155 static void
2156 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2157 struct assign_parm_data_one *data)
2159 HOST_WIDE_INT pretend_bytes = 0;
2160 rtx entry_parm;
2161 bool in_regs;
2163 if (data->promoted_mode == VOIDmode)
2165 data->entry_parm = data->stack_parm = const0_rtx;
2166 return;
2169 #ifdef FUNCTION_INCOMING_ARG
2170 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2171 data->passed_type, data->named_arg);
2172 #else
2173 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2174 data->passed_type, data->named_arg);
2175 #endif
2177 if (entry_parm == 0)
2178 data->promoted_mode = data->passed_mode;
2180 /* Determine parm's home in the stack, in case it arrives in the stack
2181 or we should pretend it did. Compute the stack position and rtx where
2182 the argument arrives and its size.
2184 There is one complexity here: If this was a parameter that would
2185 have been passed in registers, but wasn't only because it is
2186 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2187 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2188 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2189 as it was the previous time. */
2190 in_regs = entry_parm != 0;
2191 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2192 in_regs = true;
2193 #endif
2194 if (!in_regs && !data->named_arg)
2196 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2198 rtx tem;
2199 #ifdef FUNCTION_INCOMING_ARG
2200 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2201 data->passed_type, true);
2202 #else
2203 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2204 data->passed_type, true);
2205 #endif
2206 in_regs = tem != NULL;
2210 /* If this parameter was passed both in registers and in the stack, use
2211 the copy on the stack. */
2212 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2213 data->passed_type))
2214 entry_parm = 0;
2216 if (entry_parm)
2218 int partial;
2220 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2221 data->promoted_mode,
2222 data->passed_type,
2223 data->named_arg);
2224 data->partial = partial;
2226 /* The caller might already have allocated stack space for the
2227 register parameters. */
2228 if (partial != 0 && all->reg_parm_stack_space == 0)
2230 /* Part of this argument is passed in registers and part
2231 is passed on the stack. Ask the prologue code to extend
2232 the stack part so that we can recreate the full value.
2234 PRETEND_BYTES is the size of the registers we need to store.
2235 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2236 stack space that the prologue should allocate.
2238 Internally, gcc assumes that the argument pointer is aligned
2239 to STACK_BOUNDARY bits. This is used both for alignment
2240 optimizations (see init_emit) and to locate arguments that are
2241 aligned to more than PARM_BOUNDARY bits. We must preserve this
2242 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2243 a stack boundary. */
2245 /* We assume at most one partial arg, and it must be the first
2246 argument on the stack. */
2247 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2249 pretend_bytes = partial;
2250 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
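/* A worked example with made-up numbers: if 12 bytes of the argument were
   passed in registers (PRETEND_BYTES == 12) and STACK_BYTES is 16, then
   CEIL_ROUND (12, 16) sets pretend_args_size to 16, keeping the argument
   pointer aligned to STACK_BOUNDARY even though only 12 bytes are
   strictly needed.  */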
2252 /* We want to align relative to the actual stack pointer, so
2253 don't include this in the stack size until later. */
2254 all->extra_pretend_bytes = all->pretend_args_size;
2258 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2259 entry_parm ? data->partial : 0, current_function_decl,
2260 &all->stack_args_size, &data->locate);
2262 /* Adjust offsets to include the pretend args. */
2263 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2264 data->locate.slot_offset.constant += pretend_bytes;
2265 data->locate.offset.constant += pretend_bytes;
2267 data->entry_parm = entry_parm;
2270 /* A subroutine of assign_parms. If there is actually space on the stack
2271 for this parm, count it in stack_args_size and return true. */
2273 static bool
2274 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2275 struct assign_parm_data_one *data)
2277 /* Trivially true if we've no incoming register. */
2278 if (data->entry_parm == NULL)
2280 /* Also true if we're partially in registers and partially not,
2281 since we've arranged to drop the entire argument on the stack. */
2282 else if (data->partial != 0)
2284 /* Also true if the target says that it's passed in both registers
2285 and on the stack. */
2286 else if (GET_CODE (data->entry_parm) == PARALLEL
2287 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2289 /* Also true if the target says that there's stack allocated for
2290 all register parameters. */
2291 else if (all->reg_parm_stack_space > 0)
2293 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2294 else
2295 return false;
2297 all->stack_args_size.constant += data->locate.size.constant;
2298 if (data->locate.size.var)
2299 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2301 return true;
2304 /* A subroutine of assign_parms. Given that this parameter is allocated
2305 stack space by the ABI, find it. */
2307 static void
2308 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2310 rtx offset_rtx, stack_parm;
2311 unsigned int align, boundary;
2313 /* If we're passing this arg using a reg, make its stack home the
2314 aligned stack slot. */
2315 if (data->entry_parm)
2316 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2317 else
2318 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2320 stack_parm = current_function_internal_arg_pointer;
2321 if (offset_rtx != const0_rtx)
2322 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2323 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2325 set_mem_attributes (stack_parm, parm, 1);
2327 boundary = data->locate.boundary;
2328 align = BITS_PER_UNIT;
2330 /* If we're padding upward, we know that the alignment of the slot
2331 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2332 intentionally forcing upward padding. Otherwise we have to come
2333 up with a guess at the alignment based on OFFSET_RTX. */
2334 if (data->locate.where_pad != downward || data->entry_parm)
2335 align = boundary;
2336 else if (GET_CODE (offset_rtx) == CONST_INT)
2338 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2339 align = align & -align;
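/* Illustrative arithmetic (values invented): an offset of 4 bytes with a
   64-bit BOUNDARY gives 4*8 | 64 == 96 (binary 1100000); keeping only the
   lowest set bit, 96 & -96 == 32, so the slot is assumed to be 32-bit
   aligned.  */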
2341 set_mem_align (stack_parm, align);
2343 if (data->entry_parm)
2344 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2346 data->stack_parm = stack_parm;
2349 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2350 always valid and contiguous. */
2352 static void
2353 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2355 rtx entry_parm = data->entry_parm;
2356 rtx stack_parm = data->stack_parm;
2358 /* If this parm was passed part in regs and part in memory, pretend it
2359 arrived entirely in memory by pushing the register-part onto the stack.
2360 In the special case of a DImode or DFmode that is split, we could put
2361 it together in a pseudoreg directly, but for now that's not worth
2362 bothering with. */
2363 if (data->partial != 0)
2365 /* Handle calls that pass values in multiple non-contiguous
2366 locations. The Irix 6 ABI has examples of this. */
2367 if (GET_CODE (entry_parm) == PARALLEL)
2368 emit_group_store (validize_mem (stack_parm), entry_parm,
2369 data->passed_type,
2370 int_size_in_bytes (data->passed_type));
2371 else
2373 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2374 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2375 data->partial / UNITS_PER_WORD);
2378 entry_parm = stack_parm;
2381 /* If we didn't decide this parm came in a register, by default it came
2382 on the stack. */
2383 else if (entry_parm == NULL)
2384 entry_parm = stack_parm;
2386 /* When an argument is passed in multiple locations, we can't make use
2387 of this information, but we can save some copying if the whole argument
2388 is passed in a single register. */
2389 else if (GET_CODE (entry_parm) == PARALLEL
2390 && data->nominal_mode != BLKmode
2391 && data->passed_mode != BLKmode)
2393 size_t i, len = XVECLEN (entry_parm, 0);
2395 for (i = 0; i < len; i++)
2396 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2397 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2398 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2399 == data->passed_mode)
2400 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2402 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2403 break;
2407 data->entry_parm = entry_parm;
2410 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2411 always valid and properly aligned. */
2413 static void
2414 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2416 rtx stack_parm = data->stack_parm;
2418 /* If we can't trust the parm stack slot to be aligned enough for its
2419 ultimate type, don't use that slot after entry. We'll make another
2420 stack slot, if we need one. */
2421 if (stack_parm
2422 && ((STRICT_ALIGNMENT
2423 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2424 || (data->nominal_type
2425 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2426 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2427 stack_parm = NULL;
2429 /* If parm was passed in memory, and we need to convert it on entry,
2430 don't store it back in that same slot. */
2431 else if (data->entry_parm == stack_parm
2432 && data->nominal_mode != BLKmode
2433 && data->nominal_mode != data->passed_mode)
2434 stack_parm = NULL;
2436 /* If stack protection is in effect for this function, don't leave any
2437 pointers in their passed stack slots. */
2438 else if (cfun->stack_protect_guard
2439 && (flag_stack_protect == 2
2440 || data->passed_pointer
2441 || POINTER_TYPE_P (data->nominal_type)))
2442 stack_parm = NULL;
2444 data->stack_parm = stack_parm;
2447 /* A subroutine of assign_parms. Return true if the current parameter
2448 should be stored as a BLKmode in the current frame. */
2450 static bool
2451 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2453 if (data->nominal_mode == BLKmode)
2454 return true;
2455 if (GET_CODE (data->entry_parm) == PARALLEL)
2456 return true;
2458 #ifdef BLOCK_REG_PADDING
2459 /* Only assign_parm_setup_block knows how to deal with register arguments
2460 that are padded at the least significant end. */
2461 if (REG_P (data->entry_parm)
2462 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2463 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2464 == (BYTES_BIG_ENDIAN ? upward : downward)))
2465 return true;
2466 #endif
2468 return false;
2471 /* A subroutine of assign_parms. Arrange for the parameter to be
2472 present and valid in DATA->STACK_RTL. */
2474 static void
2475 assign_parm_setup_block (struct assign_parm_data_all *all,
2476 tree parm, struct assign_parm_data_one *data)
2478 rtx entry_parm = data->entry_parm;
2479 rtx stack_parm = data->stack_parm;
2480 HOST_WIDE_INT size;
2481 HOST_WIDE_INT size_stored;
2482 rtx orig_entry_parm = entry_parm;
2484 if (GET_CODE (entry_parm) == PARALLEL)
2485 entry_parm = emit_group_move_into_temps (entry_parm);
2487 /* If we've a non-block object that's nevertheless passed in parts,
2488 reconstitute it in register operations rather than on the stack. */
2489 if (GET_CODE (entry_parm) == PARALLEL
2490 && data->nominal_mode != BLKmode)
2492 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2494 if ((XVECLEN (entry_parm, 0) > 1
2495 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2496 && use_register_for_decl (parm))
2498 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2500 push_to_sequence (all->conversion_insns);
2502 /* For values returned in multiple registers, handle possible
2503 incompatible calls to emit_group_store.
2505 For example, the following would be invalid, and would have to
2506 be fixed by the conditional below:
2508 emit_group_store ((reg:SF), (parallel:DF))
2509 emit_group_store ((reg:SI), (parallel:DI))
2511 An example of this are doubles in e500 v2:
2512 (parallel:DF (expr_list (reg:SI) (const_int 0))
2513 (expr_list (reg:SI) (const_int 4))). */
2514 if (data->nominal_mode != data->passed_mode)
2516 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2517 emit_group_store (t, entry_parm, NULL_TREE,
2518 GET_MODE_SIZE (GET_MODE (entry_parm)));
2519 convert_move (parmreg, t, 0);
2521 else
2522 emit_group_store (parmreg, entry_parm, data->nominal_type,
2523 int_size_in_bytes (data->nominal_type));
2525 all->conversion_insns = get_insns ();
2526 end_sequence ();
2528 SET_DECL_RTL (parm, parmreg);
2529 return;
2533 size = int_size_in_bytes (data->passed_type);
2534 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2535 if (stack_parm == 0)
2537 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2538 stack_parm = assign_stack_local (BLKmode, size_stored,
2539 DECL_ALIGN (parm));
2540 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2541 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2542 set_mem_attributes (stack_parm, parm, 1);
2545 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2546 calls that pass values in multiple non-contiguous locations. */
2547 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2549 rtx mem;
2551 /* Note that we will be storing an integral number of words.
2552 So we have to be careful to ensure that we allocate an
2553 integral number of words. We do this above when we call
2554 assign_stack_local if space was not allocated in the argument
2555 list. If it was, this will not work if PARM_BOUNDARY is not
2556 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2557 if it becomes a problem. Exception is when BLKmode arrives
2558 with arguments not conforming to word_mode. */
2560 if (data->stack_parm == 0)
2562 else if (GET_CODE (entry_parm) == PARALLEL)
2564 else
2565 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2567 mem = validize_mem (stack_parm);
2569 /* Handle values in multiple non-contiguous locations. */
2570 if (GET_CODE (entry_parm) == PARALLEL)
2572 push_to_sequence (all->conversion_insns);
2573 emit_group_store (mem, entry_parm, data->passed_type, size);
2574 all->conversion_insns = get_insns ();
2575 end_sequence ();
2578 else if (size == 0)
2581 /* If SIZE is that of a mode no bigger than a word, just use
2582 that mode's store operation. */
2583 else if (size <= UNITS_PER_WORD)
2585 enum machine_mode mode
2586 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2588 if (mode != BLKmode
2589 #ifdef BLOCK_REG_PADDING
2590 && (size == UNITS_PER_WORD
2591 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2592 != (BYTES_BIG_ENDIAN ? upward : downward)))
2593 #endif
2596 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2597 emit_move_insn (change_address (mem, mode, 0), reg);
2600 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2601 machine must be aligned to the left before storing
2602 to memory. Note that the previous test doesn't
2603 handle all cases (e.g. SIZE == 3). */
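/* A sketch of the shift below, assuming 4-byte words: with SIZE == 3,
   BY == (4 - 3) * 8 == 8, so the register value is shifted left by one
   byte and its three significant bytes end up at the most significant
   end of the word that is stored, i.e. left-aligned in big-endian
   memory.  */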
2604 else if (size != UNITS_PER_WORD
2605 #ifdef BLOCK_REG_PADDING
2606 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2607 == downward)
2608 #else
2609 && BYTES_BIG_ENDIAN
2610 #endif
2613 rtx tem, x;
2614 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2615 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2617 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2618 build_int_cst (NULL_TREE, by),
2619 NULL_RTX, 1);
2620 tem = change_address (mem, word_mode, 0);
2621 emit_move_insn (tem, x);
2623 else
2624 move_block_from_reg (REGNO (entry_parm), mem,
2625 size_stored / UNITS_PER_WORD);
2627 else
2628 move_block_from_reg (REGNO (entry_parm), mem,
2629 size_stored / UNITS_PER_WORD);
2631 else if (data->stack_parm == 0)
2633 push_to_sequence (all->conversion_insns);
2634 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2635 BLOCK_OP_NORMAL);
2636 all->conversion_insns = get_insns ();
2637 end_sequence ();
2640 data->stack_parm = stack_parm;
2641 SET_DECL_RTL (parm, stack_parm);
2644 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2645 parameter. Get it there. Perform all ABI specified conversions. */
2647 static void
2648 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2649 struct assign_parm_data_one *data)
2651 rtx parmreg;
2652 enum machine_mode promoted_nominal_mode;
2653 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2654 bool did_conversion = false;
2656 /* Store the parm in a pseudoregister during the function, but we may
2657 need to do it in a wider mode. */
2659 /* This is not really promoting for a call. However we need to be
2660 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2661 promoted_nominal_mode
2662 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2664 parmreg = gen_reg_rtx (promoted_nominal_mode);
2666 if (!DECL_ARTIFICIAL (parm))
2667 mark_user_reg (parmreg);
2669 /* If this was an item that we received a pointer to,
2670 set DECL_RTL appropriately. */
2671 if (data->passed_pointer)
2673 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2674 set_mem_attributes (x, parm, 1);
2675 SET_DECL_RTL (parm, x);
2677 else
2678 SET_DECL_RTL (parm, parmreg);
2680 /* Copy the value into the register. */
2681 if (data->nominal_mode != data->passed_mode
2682 || promoted_nominal_mode != data->promoted_mode)
2684 int save_tree_used;
2686 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2687 mode, by the caller. We now have to convert it to
2688 NOMINAL_MODE, if different. However, PARMREG may be in
2689 a different mode than NOMINAL_MODE if it is being stored
2690 promoted.
2692 If ENTRY_PARM is a hard register, it might be in a register
2693 not valid for operating in its mode (e.g., an odd-numbered
2694 register for a DFmode). In that case, moves are the only
2695 thing valid, so we can't do a convert from there. This
2696 occurs when the calling sequence allows such misaligned
2697 usages.
2699 In addition, the conversion may involve a call, which could
2700 clobber parameters which haven't been copied to pseudo
2701 registers yet. Therefore, we must first copy the parm to
2702 a pseudo reg here, and save the conversion until after all
2703 parameters have been moved. */
2705 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2707 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2709 push_to_sequence (all->conversion_insns);
2710 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2712 if (GET_CODE (tempreg) == SUBREG
2713 && GET_MODE (tempreg) == data->nominal_mode
2714 && REG_P (SUBREG_REG (tempreg))
2715 && data->nominal_mode == data->passed_mode
2716 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2717 && GET_MODE_SIZE (GET_MODE (tempreg))
2718 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2720 /* The argument is already sign/zero extended, so note it
2721 into the subreg. */
2722 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2723 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2726 /* TREE_USED gets set erroneously during expand_assignment. */
2727 save_tree_used = TREE_USED (parm);
2728 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2729 TREE_USED (parm) = save_tree_used;
2730 all->conversion_insns = get_insns ();
2731 end_sequence ();
2733 did_conversion = true;
2735 else
2736 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2738 /* If we were passed a pointer but the actual value can safely live
2739 in a register, put it in one. */
2740 if (data->passed_pointer
2741 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2742 /* If by-reference argument was promoted, demote it. */
2743 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2744 || use_register_for_decl (parm)))
2746 /* We can't use nominal_mode, because it will have been set to
2747 Pmode above. We must use the actual mode of the parm. */
2748 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2749 mark_user_reg (parmreg);
2751 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2753 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2754 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2756 push_to_sequence (all->conversion_insns);
2757 emit_move_insn (tempreg, DECL_RTL (parm));
2758 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2759 emit_move_insn (parmreg, tempreg);
2760 all->conversion_insns = get_insns ();
2761 end_sequence ();
2763 did_conversion = true;
2765 else
2766 emit_move_insn (parmreg, DECL_RTL (parm));
2768 SET_DECL_RTL (parm, parmreg);
2770 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2771 now the parm. */
2772 data->stack_parm = NULL;
2775 /* Mark the register as eliminable if we did no conversion and it was
2776 copied from memory at a fixed offset, and the arg pointer was not
2777 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2778 offset formed an invalid address, such memory-equivalences as we
2779 make here would screw up life analysis for it. */
2780 if (data->nominal_mode == data->passed_mode
2781 && !did_conversion
2782 && data->stack_parm != 0
2783 && MEM_P (data->stack_parm)
2784 && data->locate.offset.var == 0
2785 && reg_mentioned_p (virtual_incoming_args_rtx,
2786 XEXP (data->stack_parm, 0)))
2788 rtx linsn = get_last_insn ();
2789 rtx sinsn, set;
2791 /* Mark complex types separately. */
2792 if (GET_CODE (parmreg) == CONCAT)
2794 enum machine_mode submode
2795 = GET_MODE_INNER (GET_MODE (parmreg));
2796 int regnor = REGNO (XEXP (parmreg, 0));
2797 int regnoi = REGNO (XEXP (parmreg, 1));
2798 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2799 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2800 GET_MODE_SIZE (submode));
2802 /* Scan backwards for the set of the real and
2803 imaginary parts. */
2804 for (sinsn = linsn; sinsn != 0;
2805 sinsn = prev_nonnote_insn (sinsn))
2807 set = single_set (sinsn);
2808 if (set == 0)
2809 continue;
2811 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2812 REG_NOTES (sinsn)
2813 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2814 REG_NOTES (sinsn));
2815 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2816 REG_NOTES (sinsn)
2817 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2818 REG_NOTES (sinsn));
2821 else if ((set = single_set (linsn)) != 0
2822 && SET_DEST (set) == parmreg)
2823 REG_NOTES (linsn)
2824 = gen_rtx_EXPR_LIST (REG_EQUIV,
2825 data->stack_parm, REG_NOTES (linsn));
2828 /* For pointer data type, suggest pointer register. */
2829 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2830 mark_reg_pointer (parmreg,
2831 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2834 /* A subroutine of assign_parms. Allocate stack space to hold the current
2835 parameter. Get it there. Perform all ABI specified conversions. */
2837 static void
2838 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2839 struct assign_parm_data_one *data)
2841 /* Value must be stored in the stack slot STACK_PARM during function
2842 execution. */
2843 bool to_conversion = false;
2845 if (data->promoted_mode != data->nominal_mode)
2847 /* Conversion is required. */
2848 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2850 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2852 push_to_sequence (all->conversion_insns);
2853 to_conversion = true;
2855 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2856 TYPE_UNSIGNED (TREE_TYPE (parm)));
2858 if (data->stack_parm)
2859 /* ??? This may need a big-endian conversion on sparc64. */
2860 data->stack_parm
2861 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2864 if (data->entry_parm != data->stack_parm)
2866 rtx src, dest;
2868 if (data->stack_parm == 0)
2870 data->stack_parm
2871 = assign_stack_local (GET_MODE (data->entry_parm),
2872 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2873 TYPE_ALIGN (data->passed_type));
2874 set_mem_attributes (data->stack_parm, parm, 1);
2877 dest = validize_mem (data->stack_parm);
2878 src = validize_mem (data->entry_parm);
2880 if (MEM_P (src))
2882 /* Use a block move to handle potentially misaligned entry_parm. */
2883 if (!to_conversion)
2884 push_to_sequence (all->conversion_insns);
2885 to_conversion = true;
2887 emit_block_move (dest, src,
2888 GEN_INT (int_size_in_bytes (data->passed_type)),
2889 BLOCK_OP_NORMAL);
2891 else
2892 emit_move_insn (dest, src);
2895 if (to_conversion)
2897 all->conversion_insns = get_insns ();
2898 end_sequence ();
2901 SET_DECL_RTL (parm, data->stack_parm);
2904 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2905 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2907 static void
2908 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2910 tree parm;
2911 tree orig_fnargs = all->orig_fnargs;
2913 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2915 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2916 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2918 rtx tmp, real, imag;
2919 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2921 real = DECL_RTL (fnargs);
2922 imag = DECL_RTL (TREE_CHAIN (fnargs));
2923 if (inner != GET_MODE (real))
2925 real = gen_lowpart_SUBREG (inner, real);
2926 imag = gen_lowpart_SUBREG (inner, imag);
2929 if (TREE_ADDRESSABLE (parm))
2931 rtx rmem, imem;
2932 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2934 /* split_complex_arg put the real and imag parts in
2935 pseudos. Move them to memory. */
2936 tmp = assign_stack_local (DECL_MODE (parm), size,
2937 TYPE_ALIGN (TREE_TYPE (parm)));
2938 set_mem_attributes (tmp, parm, 1);
2939 rmem = adjust_address_nv (tmp, inner, 0);
2940 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2941 push_to_sequence (all->conversion_insns);
2942 emit_move_insn (rmem, real);
2943 emit_move_insn (imem, imag);
2944 all->conversion_insns = get_insns ();
2945 end_sequence ();
2947 else
2948 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2949 SET_DECL_RTL (parm, tmp);
2951 real = DECL_INCOMING_RTL (fnargs);
2952 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2953 if (inner != GET_MODE (real))
2955 real = gen_lowpart_SUBREG (inner, real);
2956 imag = gen_lowpart_SUBREG (inner, imag);
2958 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2959 set_decl_incoming_rtl (parm, tmp);
2960 fnargs = TREE_CHAIN (fnargs);
2962 else
2964 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2965 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2967 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2968 instead of the copy of decl, i.e. FNARGS. */
2969 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2970 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2973 fnargs = TREE_CHAIN (fnargs);
2977 /* Assign RTL expressions to the function's parameters. This may involve
2978 copying them into registers and using those registers as the DECL_RTL. */
2980 static void
2981 assign_parms (tree fndecl)
2983 struct assign_parm_data_all all;
2984 tree fnargs, parm;
2986 current_function_internal_arg_pointer
2987 = targetm.calls.internal_arg_pointer ();
2989 assign_parms_initialize_all (&all);
2990 fnargs = assign_parms_augmented_arg_list (&all);
2992 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2994 struct assign_parm_data_one data;
2996 /* Extract the type of PARM; adjust it according to ABI. */
2997 assign_parm_find_data_types (&all, parm, &data);
2999 /* Early out for errors and void parameters. */
3000 if (data.passed_mode == VOIDmode)
3002 SET_DECL_RTL (parm, const0_rtx);
3003 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3004 continue;
3007 if (current_function_stdarg && !TREE_CHAIN (parm))
3008 assign_parms_setup_varargs (&all, &data, false);
3010 /* Find out where the parameter arrives in this function. */
3011 assign_parm_find_entry_rtl (&all, &data);
3013 /* Find out where stack space for this parameter might be. */
3014 if (assign_parm_is_stack_parm (&all, &data))
3016 assign_parm_find_stack_rtl (parm, &data);
3017 assign_parm_adjust_entry_rtl (&data);
3020 /* Record permanently how this parm was passed. */
3021 set_decl_incoming_rtl (parm, data.entry_parm);
3023 /* Update info on where next arg arrives in registers. */
3024 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3025 data.passed_type, data.named_arg);
3027 assign_parm_adjust_stack_rtl (&data);
3029 if (assign_parm_setup_block_p (&data))
3030 assign_parm_setup_block (&all, parm, &data);
3031 else if (data.passed_pointer || use_register_for_decl (parm))
3032 assign_parm_setup_reg (&all, parm, &data);
3033 else
3034 assign_parm_setup_stack (&all, parm, &data);
3037 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3038 assign_parms_unsplit_complex (&all, fnargs);
3040 /* Output all parameter conversion instructions (possibly including calls)
3041 now that all parameters have been copied out of hard registers. */
3042 emit_insn (all.conversion_insns);
3044 /* If we are receiving a struct value address as the first argument, set up
3045 the RTL for the function result. As this might require code to convert
3046 the transmitted address to Pmode, we do this here to ensure that possible
3047 preliminary conversions of the address have been emitted already. */
3048 if (all.function_result_decl)
3050 tree result = DECL_RESULT (current_function_decl);
3051 rtx addr = DECL_RTL (all.function_result_decl);
3052 rtx x;
3054 if (DECL_BY_REFERENCE (result))
3055 x = addr;
3056 else
3058 addr = convert_memory_address (Pmode, addr);
3059 x = gen_rtx_MEM (DECL_MODE (result), addr);
3060 set_mem_attributes (x, result, 1);
3062 SET_DECL_RTL (result, x);
3065 /* We have aligned all the args, so add space for the pretend args. */
3066 current_function_pretend_args_size = all.pretend_args_size;
3067 all.stack_args_size.constant += all.extra_pretend_bytes;
3068 current_function_args_size = all.stack_args_size.constant;
3070 /* Adjust function incoming argument size for alignment and
3071 minimum length. */
3073 #ifdef REG_PARM_STACK_SPACE
3074 current_function_args_size = MAX (current_function_args_size,
3075 REG_PARM_STACK_SPACE (fndecl));
3076 #endif
3078 current_function_args_size = CEIL_ROUND (current_function_args_size,
3079 PARM_BOUNDARY / BITS_PER_UNIT);
3081 #ifdef ARGS_GROW_DOWNWARD
3082 current_function_arg_offset_rtx
3083 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3084 : expand_expr (size_diffop (all.stack_args_size.var,
3085 size_int (-all.stack_args_size.constant)),
3086 NULL_RTX, VOIDmode, 0));
3087 #else
3088 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3089 #endif
3091 /* See how many bytes, if any, of its args a function should try to pop
3092 on return. */
3094 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3095 current_function_args_size);
3097 /* For stdarg.h functions, save info about
3098 regs and stack space used by the named args. */
3100 current_function_args_info = all.args_so_far;
3102 /* Set the rtx used for the function return value. Put this in its
3103 own variable so any optimizers that need this information don't have
3104 to include tree.h. Do this here so it gets done when an inlined
3105 function gets output. */
3107 current_function_return_rtx
3108 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3109 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3111 /* If scalar return value was computed in a pseudo-reg, or was a named
3112 return value that got dumped to the stack, copy that to the hard
3113 return register. */
3114 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3116 tree decl_result = DECL_RESULT (fndecl);
3117 rtx decl_rtl = DECL_RTL (decl_result);
3119 if (REG_P (decl_rtl)
3120 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3121 : DECL_REGISTER (decl_result))
3123 rtx real_decl_rtl;
3125 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3126 fndecl, true);
3127 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3128 /* The delay slot scheduler assumes that current_function_return_rtx
3129 holds the hard register containing the return value, not a
3130 temporary pseudo. */
3131 current_function_return_rtx = real_decl_rtl;
3136 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3137 For all seen types, gimplify their sizes. */
3139 static tree
3140 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3142 tree t = *tp;
3144 *walk_subtrees = 0;
3145 if (TYPE_P (t))
3147 if (POINTER_TYPE_P (t))
3148 *walk_subtrees = 1;
3149 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3150 && !TYPE_SIZES_GIMPLIFIED (t))
3152 gimplify_type_sizes (t, (tree *) data);
3153 *walk_subtrees = 1;
3157 return NULL;
3160 /* Gimplify the parameter list for current_function_decl. This involves
3161 evaluating SAVE_EXPRs of variable sized parameters and generating code
3162 to implement callee-copies reference parameters. Returns a list of
3163 statements to add to the beginning of the function, or NULL if nothing
3164 to do. */
3166 tree
3167 gimplify_parameters (void)
3169 struct assign_parm_data_all all;
3170 tree fnargs, parm, stmts = NULL;
3172 assign_parms_initialize_all (&all);
3173 fnargs = assign_parms_augmented_arg_list (&all);
3175 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3177 struct assign_parm_data_one data;
3179 /* Extract the type of PARM; adjust it according to ABI. */
3180 assign_parm_find_data_types (&all, parm, &data);
3182 /* Early out for errors and void parameters. */
3183 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3184 continue;
3186 /* Update info on where next arg arrives in registers. */
3187 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3188 data.passed_type, data.named_arg);
3190 /* ??? Once upon a time variable_size stuffed parameter list
3191 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3192 turned out to be less than manageable in the gimple world.
3193 Now we have to hunt them down ourselves. */
3194 walk_tree_without_duplicates (&data.passed_type,
3195 gimplify_parm_type, &stmts);
3197 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3199 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3200 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3203 if (data.passed_pointer)
3205 tree type = TREE_TYPE (data.passed_type);
3206 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3207 type, data.named_arg))
3209 tree local, t;
3211 /* For constant sized objects, this is trivial; for
3212 variable-sized objects, we have to play games. */
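/* A rough sketch (pseudo-GIMPLE, simplified) of what follows: a
   constant-size type T gets a plain temporary and a copy,

       T local;
       local = parm;

   while a variable-sized T gets its copy via alloca and is accessed
   through a pointer,

       addr = __builtin_alloca (DECL_SIZE_UNIT (parm));
       *addr = parm;

   In both cases the parameter's DECL_VALUE_EXPR is redirected to the
   local copy, so later uses of PARM read the callee's copy.  */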
3213 if (TREE_CONSTANT (DECL_SIZE (parm)))
3215 local = create_tmp_var (type, get_name (parm));
3216 DECL_IGNORED_P (local) = 0;
3218 else
3220 tree ptr_type, addr, args;
3222 ptr_type = build_pointer_type (type);
3223 addr = create_tmp_var (ptr_type, get_name (parm));
3224 DECL_IGNORED_P (addr) = 0;
3225 local = build_fold_indirect_ref (addr);
3227 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3228 t = built_in_decls[BUILT_IN_ALLOCA];
3229 t = build_function_call_expr (t, args);
3230 t = fold_convert (ptr_type, t);
3231 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
3232 gimplify_and_add (t, &stmts);
3235 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, local, parm);
3236 gimplify_and_add (t, &stmts);
3238 SET_DECL_VALUE_EXPR (parm, local);
3239 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3244 return stmts;
3247 /* Indicate whether REGNO is an incoming argument to the current function
3248 that was promoted to a wider mode. If so, return the RTX for the
3249 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3250 that REGNO is promoted from and whether the promotion was signed or
3251 unsigned. */
3253 rtx
3254 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3256 tree arg;
3258 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3259 arg = TREE_CHAIN (arg))
3260 if (REG_P (DECL_INCOMING_RTL (arg))
3261 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3262 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3264 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3265 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3267 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3268 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3269 && mode != DECL_MODE (arg))
3271 *pmode = DECL_MODE (arg);
3272 *punsignedp = unsignedp;
3273 return DECL_INCOMING_RTL (arg);
3277 return 0;
3281 /* Compute the size and offset from the start of the stacked arguments for a
3282 parm passed in mode PASSED_MODE and with type TYPE.
3284 INITIAL_OFFSET_PTR points to the current offset into the stacked
3285 arguments.
3287 The starting offset and size for this parm are returned in
3288 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3289 nonzero, the offset is that of the stack slot, which is returned in
3290 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3291 padding required from the initial offset ptr to the stack slot.
3293 IN_REGS is nonzero if the argument will be passed in registers. It will
3294 never be set if REG_PARM_STACK_SPACE is not defined.
3296 FNDECL is the function in which the argument was defined.
3298 There are two types of rounding that are done. The first, controlled by
3299 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3300 list to be aligned to the specific boundary (in bits). This rounding
3301 affects the initial and starting offsets, but not the argument size.
3303 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3304 optionally rounds the size of the parm to PARM_BOUNDARY. The
3305 initial offset is not affected by this rounding, while the size always
3306 is and the starting offset may be. */
3308 /* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3309 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3310 callers pass in the total size of args so far as
3311 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
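/* A small worked example, with invented numbers and args growing upward:
   for a 4-byte parameter with a FUNCTION_ARG_BOUNDARY of 32 bits and an
   incoming INITIAL_OFFSET_PTR of 8 bytes, no alignment padding is needed,
   so LOCATE->SLOT_OFFSET and LOCATE->OFFSET are both 8 and LOCATE->SIZE
   is 4; the caller then advances its running offset to 12 for the next
   parameter.  */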
3313 void
3314 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3315 int partial, tree fndecl ATTRIBUTE_UNUSED,
3316 struct args_size *initial_offset_ptr,
3317 struct locate_and_pad_arg_data *locate)
3319 tree sizetree;
3320 enum direction where_pad;
3321 unsigned int boundary;
3322 int reg_parm_stack_space = 0;
3323 int part_size_in_regs;
3325 #ifdef REG_PARM_STACK_SPACE
3326 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3328 /* If we have found a stack parm before we reach the end of the
3329 area reserved for registers, skip that area. */
3330 if (! in_regs)
3332 if (reg_parm_stack_space > 0)
3334 if (initial_offset_ptr->var)
3336 initial_offset_ptr->var
3337 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3338 ssize_int (reg_parm_stack_space));
3339 initial_offset_ptr->constant = 0;
3341 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3342 initial_offset_ptr->constant = reg_parm_stack_space;
3345 #endif /* REG_PARM_STACK_SPACE */
3347 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3349 sizetree
3350 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3351 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3352 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3353 locate->where_pad = where_pad;
3354 locate->boundary = boundary;
3356 /* Remember if the outgoing parameter requires extra alignment on the
3357 calling function side. */
3358 if (boundary > PREFERRED_STACK_BOUNDARY)
3359 boundary = PREFERRED_STACK_BOUNDARY;
3360 if (cfun->stack_alignment_needed < boundary)
3361 cfun->stack_alignment_needed = boundary;
3363 #ifdef ARGS_GROW_DOWNWARD
3364 locate->slot_offset.constant = -initial_offset_ptr->constant;
3365 if (initial_offset_ptr->var)
3366 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3367 initial_offset_ptr->var);
3370 tree s2 = sizetree;
3371 if (where_pad != none
3372 && (!host_integerp (sizetree, 1)
3373 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3374 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3375 SUB_PARM_SIZE (locate->slot_offset, s2);
3378 locate->slot_offset.constant += part_size_in_regs;
3380 if (!in_regs
3381 #ifdef REG_PARM_STACK_SPACE
3382 || REG_PARM_STACK_SPACE (fndecl) > 0
3383 #endif
3385 pad_to_arg_alignment (&locate->slot_offset, boundary,
3386 &locate->alignment_pad);
3388 locate->size.constant = (-initial_offset_ptr->constant
3389 - locate->slot_offset.constant);
3390 if (initial_offset_ptr->var)
3391 locate->size.var = size_binop (MINUS_EXPR,
3392 size_binop (MINUS_EXPR,
3393 ssize_int (0),
3394 initial_offset_ptr->var),
3395 locate->slot_offset.var);
3397 /* Pad_below needs the pre-rounded size to know how much to pad
3398 below. */
3399 locate->offset = locate->slot_offset;
3400 if (where_pad == downward)
3401 pad_below (&locate->offset, passed_mode, sizetree);
3403 #else /* !ARGS_GROW_DOWNWARD */
3404 if (!in_regs
3405 #ifdef REG_PARM_STACK_SPACE
3406 || REG_PARM_STACK_SPACE (fndecl) > 0
3407 #endif
3409 pad_to_arg_alignment (initial_offset_ptr, boundary,
3410 &locate->alignment_pad);
3411 locate->slot_offset = *initial_offset_ptr;
3413 #ifdef PUSH_ROUNDING
3414 if (passed_mode != BLKmode)
3415 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3416 #endif
3418 /* Pad_below needs the pre-rounded size to know how much to pad below
3419 so this must be done before rounding up. */
3420 locate->offset = locate->slot_offset;
3421 if (where_pad == downward)
3422 pad_below (&locate->offset, passed_mode, sizetree);
3424 if (where_pad != none
3425 && (!host_integerp (sizetree, 1)
3426 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3427 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3429 ADD_PARM_SIZE (locate->size, sizetree);
3431 locate->size.constant -= part_size_in_regs;
3432 #endif /* ARGS_GROW_DOWNWARD */
3435 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3436 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3438 static void
3439 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3440 struct args_size *alignment_pad)
3442 tree save_var = NULL_TREE;
3443 HOST_WIDE_INT save_constant = 0;
3444 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3445 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3447 #ifdef SPARC_STACK_BOUNDARY_HACK
3448 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3449 the real alignment of %sp. However, when it does this, the
3450 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3451 if (SPARC_STACK_BOUNDARY_HACK)
3452 sp_offset = 0;
3453 #endif
3455 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3457 save_var = offset_ptr->var;
3458 save_constant = offset_ptr->constant;
3461 alignment_pad->var = NULL_TREE;
3462 alignment_pad->constant = 0;
3464 if (boundary > BITS_PER_UNIT)
3466 if (offset_ptr->var)
3468 tree sp_offset_tree = ssize_int (sp_offset);
3469 tree offset = size_binop (PLUS_EXPR,
3470 ARGS_SIZE_TREE (*offset_ptr),
3471 sp_offset_tree);
3472 #ifdef ARGS_GROW_DOWNWARD
3473 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3474 #else
3475 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3476 #endif
3478 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3479 /* ARGS_SIZE_TREE includes constant term. */
3480 offset_ptr->constant = 0;
3481 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3482 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3483 save_var);
3485 else
3487 offset_ptr->constant = -sp_offset +
3488 #ifdef ARGS_GROW_DOWNWARD
3489 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3490 #else
3491 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3492 #endif
3493 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3494 alignment_pad->constant = offset_ptr->constant - save_constant;
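/* Illustrative constants (args growing upward, sp_offset == 0): an
   incoming offset of 20 bytes and a 64-bit boundary give
   boundary_in_bytes == 8 and CEIL_ROUND (20, 8) == 24, so the offset
   becomes 24 and, when the boundary exceeds both PARM_BOUNDARY and
   STACK_BOUNDARY, alignment_pad records the 4 bytes of padding.  */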
3499 static void
3500 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3502 if (passed_mode != BLKmode)
3504 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3505 offset_ptr->constant
3506 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3507 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3508 - GET_MODE_SIZE (passed_mode));
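/* For instance (made-up values): a 2-byte HImode parameter with a 32-bit
   PARM_BOUNDARY rounds up to (16 + 31) / 32 * 32 / 8 == 4 bytes, so
   2 bytes of padding are added below the value.  */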
3510 else
3512 if (TREE_CODE (sizetree) != INTEGER_CST
3513 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3515 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3516 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3517 /* Add it in. */
3518 ADD_PARM_SIZE (*offset_ptr, s2);
3519 SUB_PARM_SIZE (*offset_ptr, sizetree);
3524 /* Walk the tree of blocks describing the binding levels within a function
3525 and warn about variables that might be killed by setjmp or vfork.
3526 This is done after calling flow_analysis and before global_alloc
3527 clobbers the pseudo-regs to hard regs. */
3529 void
3530 setjmp_vars_warning (tree block)
3532 tree decl, sub;
3534 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3536 if (TREE_CODE (decl) == VAR_DECL
3537 && DECL_RTL_SET_P (decl)
3538 && REG_P (DECL_RTL (decl))
3539 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3540 warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3541 " or %<vfork%>",
3542 decl);
3545 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3546 setjmp_vars_warning (sub);
3549 /* Do the appropriate part of setjmp_vars_warning
3550 but for arguments instead of local variables. */
3552 void
3553 setjmp_args_warning (void)
3555 tree decl;
3556 for (decl = DECL_ARGUMENTS (current_function_decl);
3557 decl; decl = TREE_CHAIN (decl))
3558 if (DECL_RTL (decl) != 0
3559 && REG_P (DECL_RTL (decl))
3560 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3561 warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3562 decl);
3566 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3567 and create duplicate blocks. */
3568 /* ??? Need an option to either create block fragments or to create
3569 abstract origin duplicates of a source block. It really depends
3570 on what optimization has been performed. */
3572 void
3573 reorder_blocks (void)
3575 tree block = DECL_INITIAL (current_function_decl);
3576 VEC(tree,heap) *block_stack;
3578 if (block == NULL_TREE)
3579 return;
3581 block_stack = VEC_alloc (tree, heap, 10);
3583 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3584 clear_block_marks (block);
3586 /* Prune the old trees away, so that they don't get in the way. */
3587 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3588 BLOCK_CHAIN (block) = NULL_TREE;
3590 /* Recreate the block tree from the note nesting. */
3591 reorder_blocks_1 (get_insns (), block, &block_stack);
3592 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3594 VEC_free (tree, heap, block_stack);
3597 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3599 void
3600 clear_block_marks (tree block)
3602 while (block)
3604 TREE_ASM_WRITTEN (block) = 0;
3605 clear_block_marks (BLOCK_SUBBLOCKS (block));
3606 block = BLOCK_CHAIN (block);
3610 static void
3611 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3613 rtx insn;
3615 for (insn = insns; insn; insn = NEXT_INSN (insn))
3617 if (NOTE_P (insn))
3619 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3621 tree block = NOTE_BLOCK (insn);
3622 tree origin;
3624 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3625 ? BLOCK_FRAGMENT_ORIGIN (block)
3626 : block);
3628 /* If we have seen this block before, that means it now
3629 spans multiple address regions. Create a new fragment. */
3630 if (TREE_ASM_WRITTEN (block))
3632 tree new_block = copy_node (block);
3634 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3635 BLOCK_FRAGMENT_CHAIN (new_block)
3636 = BLOCK_FRAGMENT_CHAIN (origin);
3637 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3639 NOTE_BLOCK (insn) = new_block;
3640 block = new_block;
3643 BLOCK_SUBBLOCKS (block) = 0;
3644 TREE_ASM_WRITTEN (block) = 1;
3645 /* When there's only one block for the entire function,
3646 current_block == block and we mustn't do this; it
3647 would cause infinite recursion. */
3648 if (block != current_block)
3650 if (block != origin)
3651 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3653 BLOCK_SUPERCONTEXT (block) = current_block;
3654 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3655 BLOCK_SUBBLOCKS (current_block) = block;
3656 current_block = origin;
3658 VEC_safe_push (tree, heap, *p_block_stack, block);
3660 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3662 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3663 BLOCK_SUBBLOCKS (current_block)
3664 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3665 current_block = BLOCK_SUPERCONTEXT (current_block);
3671 /* Reverse the order of elements in the chain T of blocks,
3672 and return the new head of the chain (old last element). */
3674 tree
3675 blocks_nreverse (tree t)
3677 tree prev = 0, decl, next;
3678 for (decl = t; decl; decl = next)
3680 next = BLOCK_CHAIN (decl);
3681 BLOCK_CHAIN (decl) = prev;
3682 prev = decl;
3684 return prev;
3687 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3688 non-NULL, list them all into VECTOR, in a depth-first preorder
3689 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3690 blocks. */
3692 static int
3693 all_blocks (tree block, tree *vector)
3695 int n_blocks = 0;
3697 while (block)
3699 TREE_ASM_WRITTEN (block) = 0;
3701 /* Record this block. */
3702 if (vector)
3703 vector[n_blocks] = block;
3705 ++n_blocks;
3707 /* Record the subblocks, and their subblocks... */
3708 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3709 vector ? vector + n_blocks : 0);
3710 block = BLOCK_CHAIN (block);
3713 return n_blocks;
3716 /* Return a vector containing all the blocks rooted at BLOCK. The
3717 number of elements in the vector is stored in N_BLOCKS_P. The
3718 vector is dynamically allocated; it is the caller's responsibility
3719 to call `free' on the pointer returned. */
3721 static tree *
3722 get_block_vector (tree block, int *n_blocks_p)
3724 tree *block_vector;
3726 *n_blocks_p = all_blocks (block, NULL);
3727 block_vector = XNEWVEC (tree, *n_blocks_p);
3728 all_blocks (block, block_vector);
3730 return block_vector;
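/* Index to assign to the next lexical BLOCK; see number_blocks below. */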
3733 static GTY(()) int next_block_index = 2;
3735 /* Set BLOCK_NUMBER for all the blocks in FN. */
3737 void
3738 number_blocks (tree fn)
3740 int i;
3741 int n_blocks;
3742 tree *block_vector;
3744 /* For SDB and XCOFF debugging output, we start numbering the blocks
3745 from 1 within each function, rather than keeping a running
3746 count. */
3747 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3748 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3749 next_block_index = 1;
3750 #endif
3752 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3754 /* The top-level BLOCK isn't numbered at all. */
3755 for (i = 1; i < n_blocks; ++i)
3756 /* We number the blocks from two. */
3757 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3759 free (block_vector);
3761 return;
3764 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3766 tree
3767 debug_find_var_in_block_tree (tree var, tree block)
3769 tree t;
3771 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3772 if (t == var)
3773 return block;
3775 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3777 tree ret = debug_find_var_in_block_tree (var, t);
3778 if (ret)
3779 return ret;
3782 return NULL_TREE;
3785 /* Allocate a function structure for FNDECL and set its contents
3786 to the defaults. */
3788 void
3789 allocate_struct_function (tree fndecl)
3791 tree result;
3792 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3794 cfun = ggc_alloc_cleared (sizeof (struct function));
3796 cfun->stack_alignment_needed = STACK_BOUNDARY;
3797 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3799 current_function_funcdef_no = funcdef_no++;
3801 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3803 init_eh_for_function ();
3805 lang_hooks.function.init (cfun);
3806 if (init_machine_status)
3807 cfun->machine = (*init_machine_status) ();
3809 if (fndecl == NULL)
3810 return;
3812 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3813 cfun->decl = fndecl;
3815 result = DECL_RESULT (fndecl);
3816 if (aggregate_value_p (result, fndecl))
3818 #ifdef PCC_STATIC_STRUCT_RETURN
3819 current_function_returns_pcc_struct = 1;
3820 #endif
3821 current_function_returns_struct = 1;
3824 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3826 current_function_stdarg
3827 = (fntype
3828 && TYPE_ARG_TYPES (fntype) != 0
3829 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3830 != void_type_node));
3832 /* Assume all registers in stdarg functions need to be saved. */
3833 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3834 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3837 /* Reset cfun, and other non-struct-function variables to defaults as
3838 appropriate for emitting rtl at the start of a function. */
3840 static void
3841 prepare_function_start (tree fndecl)
3843 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3844 cfun = DECL_STRUCT_FUNCTION (fndecl);
3845 else
3846 allocate_struct_function (fndecl);
3847 init_emit ();
3848 init_varasm_status (cfun);
3849 init_expr ();
3851 cse_not_expected = ! optimize;
3853 /* Caller save not needed yet. */
3854 caller_save_needed = 0;
3856 /* We haven't done register allocation yet. */
3857 reg_renumber = 0;
3859 /* Indicate that we have not instantiated virtual registers yet. */
3860 virtuals_instantiated = 0;
3862 /* Indicate that we want CONCATs now. */
3863 generating_concat_p = 1;
3865 /* Indicate we have no need of a frame pointer yet. */
3866 frame_pointer_needed = 0;
3869 /* Initialize the rtl expansion mechanism so that we can do simple things
3870 like generate sequences. This is used to provide a context during global
3871 initialization of some passes. */
3872 void
3873 init_dummy_function_start (void)
3875 prepare_function_start (NULL);
3878 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3879 and initialize static variables for generating RTL for the statements
3880 of the function. */
3882 void
3883 init_function_start (tree subr)
3885 prepare_function_start (subr);
3887 /* Prevent ever trying to delete the first instruction of a
3888 function. Also tell final how to output a linenum before the
3889 function prologue. Note linenums could be missing, e.g. when
3890 compiling a Java .class file. */
3891 if (! DECL_IS_BUILTIN (subr))
3892 emit_line_note (DECL_SOURCE_LOCATION (subr));
3894 /* Make sure first insn is a note even if we don't want linenums.
3895 This makes sure the first insn will never be deleted.
3896 Also, final expects a note to appear there. */
3897 emit_note (NOTE_INSN_DELETED);
3899 /* Warn if this value is an aggregate type,
3900 regardless of which calling convention we are using for it. */
3901 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3902 warning (OPT_Waggregate_return, "function returns an aggregate");
3905 /* Make sure all values used by the optimization passes have sane
3906 defaults. */
3907 unsigned int
3908 init_function_for_compilation (void)
3910 reg_renumber = 0;
3912 /* No prologue/epilogue insns yet. Make sure that these vectors are
3913 empty. */
3914 gcc_assert (VEC_length (int, prologue) == 0);
3915 gcc_assert (VEC_length (int, epilogue) == 0);
3916 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3917 return 0;
3920 struct tree_opt_pass pass_init_function =
3922 NULL, /* name */
3923 NULL, /* gate */
3924 init_function_for_compilation, /* execute */
3925 NULL, /* sub */
3926 NULL, /* next */
3927 0, /* static_pass_number */
3928 0, /* tv_id */
3929 0, /* properties_required */
3930 0, /* properties_provided */
3931 0, /* properties_destroyed */
3932 0, /* todo_flags_start */
3933 0, /* todo_flags_finish */
3934 0 /* letter */
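/* Emit a call to __main (NAME__MAIN) at the start of main, on targets that
   require it (see the #if conditions below). */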
3938 void
3939 expand_main_function (void)
3941 #if (defined(INVOKE__main) \
3942 || (!defined(HAS_INIT_SECTION) \
3943 && !defined(INIT_SECTION_ASM_OP) \
3944 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3945 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3946 #endif
3949 /* Expand code to initialize the stack_protect_guard. This is invoked at
3950 the beginning of a function to be protected. */
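/* If the target provides no stack_protect_set pattern, define inert
   fallbacks; stack_protect_prologue then falls back to a plain move. */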
3952 #ifndef HAVE_stack_protect_set
3953 # define HAVE_stack_protect_set 0
3954 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
3955 #endif
3957 void
3958 stack_protect_prologue (void)
3960 tree guard_decl = targetm.stack_protect_guard ();
3961 rtx x, y;
3963 /* Avoid expand_expr here, because we don't want guard_decl pulled
3964 into registers unless absolutely necessary. And we know that
3965 cfun->stack_protect_guard is a local stack slot, so this skips
3966 all the fluff. */
3967 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3968 y = validize_mem (DECL_RTL (guard_decl));
3970 /* Allow the target to copy from Y to X without leaking Y into a
3971 register. */
3972 if (HAVE_stack_protect_set)
3974 rtx insn = gen_stack_protect_set (x, y);
3975 if (insn)
3977 emit_insn (insn);
3978 return;
3982 /* Otherwise do a straight move. */
3983 emit_move_insn (x, y);
3986 /* Expand code to verify the stack_protect_guard. This is invoked at
3987 the end of a function to be protected. */
3989 #ifndef HAVE_stack_protect_test
3990 # define HAVE_stack_protect_test 0
3991 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
3992 #endif
3994 void
3995 stack_protect_epilogue (void)
3997 tree guard_decl = targetm.stack_protect_guard ();
3998 rtx label = gen_label_rtx ();
3999 rtx x, y, tmp;
4001 /* Avoid expand_expr here, because we don't want guard_decl pulled
4002 into registers unless absolutely necessary. And we know that
4003 cfun->stack_protect_guard is a local stack slot, so this skips
4004 all the fluff. */
4005 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4006 y = validize_mem (DECL_RTL (guard_decl));
4008 /* Allow the target to compare Y with X without leaking either into
4009 a register. */
4010 switch (HAVE_stack_protect_test != 0)
4012 case 1:
4013 tmp = gen_stack_protect_test (x, y, label);
4014 if (tmp)
4016 emit_insn (tmp);
4017 break;
4019 /* FALLTHRU */
4021 default:
4022 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4023 break;
4026 /* The noreturn predictor has been moved to the tree level. The rtl-level
4027 predictors estimate this branch about 20%, which isn't enough to get
4028 things moved out of line. Since this is the only extant case of adding
4029 a noreturn function at the rtl level, it doesn't seem worth doing anything
4030 except adding the prediction by hand. */
4031 tmp = get_last_insn ();
4032 if (JUMP_P (tmp))
4033 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4035 expand_expr_stmt (targetm.stack_protect_fail ());
4036 emit_label (label);
4039 /* Start the RTL for a new function, and set variables used for
4040 emitting RTL.
4041 SUBR is the FUNCTION_DECL node.
4042 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4043 the function's parameters, which must be run at any return statement. */
4045 void
4046 expand_function_start (tree subr)
4048 /* Make sure volatile mem refs aren't considered
4049 valid operands of arithmetic insns. */
4050 init_recog_no_volatile ();
4052 current_function_profile
4053 = (profile_flag
4054 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4056 current_function_limit_stack
4057 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4059 /* Make the label for return statements to jump to. Do not special
4060 case machines with special return instructions -- they will be
4061 handled later during jump, ifcvt, or epilogue creation. */
4062 return_label = gen_label_rtx ();
4064 /* Initialize rtx used to return the value. */
4065 /* Do this before assign_parms so that we copy the struct value address
4066 before any library calls that assign parms might generate. */
4068 /* Decide whether to return the value in memory or in a register. */
4069 if (aggregate_value_p (DECL_RESULT (subr), subr))
4071 /* Returning something that won't go in a register. */
4072 rtx value_address = 0;
4074 #ifdef PCC_STATIC_STRUCT_RETURN
4075 if (current_function_returns_pcc_struct)
4077 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4078 value_address = assemble_static_space (size);
4080 else
4081 #endif
4083 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4084 /* Expect to be passed the address of a place to store the value.
4085 If it is passed as an argument, assign_parms will take care of
4086 it. */
4087 if (sv)
4089 value_address = gen_reg_rtx (Pmode);
4090 emit_move_insn (value_address, sv);
4093 if (value_address)
4095 rtx x = value_address;
4096 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4098 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4099 set_mem_attributes (x, DECL_RESULT (subr), 1);
4101 SET_DECL_RTL (DECL_RESULT (subr), x);
4104 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4105 /* If return mode is void, this decl rtl should not be used. */
4106 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4107 else
4109 /* Compute the return values into a pseudo reg, which we will copy
4110 into the true return register after the cleanups are done. */
4111 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4112 if (TYPE_MODE (return_type) != BLKmode
4113 && targetm.calls.return_in_msb (return_type))
4114 /* expand_function_end will insert the appropriate padding in
4115 this case. Use the return value's natural (unpadded) mode
4116 within the function proper. */
4117 SET_DECL_RTL (DECL_RESULT (subr),
4118 gen_reg_rtx (TYPE_MODE (return_type)));
4119 else
4121 /* In order to figure out what mode to use for the pseudo, we
4122 figure out what the mode of the eventual return register will
4123 actually be, and use that. */
4124 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4126 /* Structures that are returned in registers are not
4127 aggregate_value_p, so we may see a PARALLEL or a REG. */
4128 if (REG_P (hard_reg))
4129 SET_DECL_RTL (DECL_RESULT (subr),
4130 gen_reg_rtx (GET_MODE (hard_reg)));
4131 else
4133 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4134 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4138 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4139 result to the real return register(s). */
4140 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4143 /* Initialize rtx for parameters and local variables.
4144 In some cases this requires emitting insns. */
4145 assign_parms (subr);
4147 /* If function gets a static chain arg, store it. */
4148 if (cfun->static_chain_decl)
4150 tree parm = cfun->static_chain_decl;
4151 rtx local = gen_reg_rtx (Pmode);
4153 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4154 SET_DECL_RTL (parm, local);
4155 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4157 emit_move_insn (local, static_chain_incoming_rtx);
4160 /* If the function receives a non-local goto, then store the
4161 bits we need to restore the frame pointer. */
4162 if (cfun->nonlocal_goto_save_area)
4164 tree t_save;
4165 rtx r_save;
4167 /* ??? We need to do this save early. Unfortunately, this point is
4168 before the frame variable gets declared. Help out... */
4169 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4171 t_save = build4 (ARRAY_REF, ptr_type_node,
4172 cfun->nonlocal_goto_save_area,
4173 integer_zero_node, NULL_TREE, NULL_TREE);
4174 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4175 r_save = convert_memory_address (Pmode, r_save);
4177 emit_move_insn (r_save, virtual_stack_vars_rtx);
4178 update_nonlocal_goto_save_area ();
4181 /* The following was moved from init_function_start.
4182 The move is supposed to make sdb output more accurate. */
4183 /* Indicate the beginning of the function body,
4184 as opposed to parm setup. */
4185 emit_note (NOTE_INSN_FUNCTION_BEG);
4187 gcc_assert (NOTE_P (get_last_insn ()));
4189 parm_birth_insn = get_last_insn ();
4191 if (current_function_profile)
4193 #ifdef PROFILE_HOOK
4194 PROFILE_HOOK (current_function_funcdef_no);
4195 #endif
4198 /* The stack checking probe should go right after the display
4199 initializations. */
4200 if (flag_stack_check)
4201 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4203 /* Make sure there is a line number after the function entry setup code. */
4204 force_next_line_note ();
4207 /* Undo the effects of init_dummy_function_start. */
4208 void
4209 expand_dummy_function_end (void)
4211 /* End any sequences that failed to be closed due to syntax errors. */
4212 while (in_sequence_p ())
4213 end_sequence ();
4215 /* Outside function body, can't compute type's actual size
4216 until next function's body starts. */
4218 free_after_parsing (cfun);
4219 free_after_compilation (cfun);
4220 cfun = 0;
4223 /* Call DOIT for each hard register used as a return value from
4224 the current function. */
4226 void
4227 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4229 rtx outgoing = current_function_return_rtx;
4231 if (! outgoing)
4232 return;
4234 if (REG_P (outgoing))
4235 (*doit) (outgoing, arg);
4236 else if (GET_CODE (outgoing) == PARALLEL)
4238 int i;
4240 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4242 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4244 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4245 (*doit) (x, arg);
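/* Callback for diddle_return_value: emit a CLOBBER of REG. */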
4250 static void
4251 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4253 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
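/* Emit CLOBBERs for the hard registers holding the current function's
   return value, and for the pseudo holding it, if any. */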
4256 void
4257 clobber_return_register (void)
4259 diddle_return_value (do_clobber_return_reg, NULL);
4261 /* In case we do use pseudo to return value, clobber it too. */
4262 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4264 tree decl_result = DECL_RESULT (current_function_decl);
4265 rtx decl_rtl = DECL_RTL (decl_result);
4266 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4268 do_clobber_return_reg (decl_rtl, NULL);
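/* Callback for diddle_return_value: emit a USE of REG. */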
4273 static void
4274 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4276 emit_insn (gen_rtx_USE (VOIDmode, reg));
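/* Emit USEs of the hard registers holding the current function's
   return value. */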
4279 static void
4280 use_return_register (void)
4282 diddle_return_value (do_use_return_reg, NULL);
4285 /* Possibly warn about unused parameters. */
4286 void
4287 do_warn_unused_parameter (tree fn)
4289 tree decl;
4291 for (decl = DECL_ARGUMENTS (fn);
4292 decl; decl = TREE_CHAIN (decl))
4293 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4294 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4295 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4298 static GTY(()) rtx initial_trampoline;
4300 /* Generate RTL for the end of the current function. */
4302 void
4303 expand_function_end (void)
4305 rtx clobber_after;
4307 /* If arg_pointer_save_area was referenced only from a nested
4308 function, we will not have initialized it yet. Do that now. */
4309 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4310 get_arg_pointer_save_area (cfun);
4312 /* If we are doing stack checking and this function makes calls,
4313 do a stack probe at the start of the function to ensure we have enough
4314 space for another stack frame. */
4315 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4317 rtx insn, seq;
4319 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4320 if (CALL_P (insn))
4322 start_sequence ();
4323 probe_stack_range (STACK_CHECK_PROTECT,
4324 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4325 seq = get_insns ();
4326 end_sequence ();
4327 emit_insn_before (seq, stack_check_probe_note);
4328 break;
4332 /* Possibly warn about unused parameters.
4333 When the frontend does unit-at-a-time, the warning is already
4334 issued at finalization time. */
4335 if (warn_unused_parameter
4336 && !lang_hooks.callgraph.expand_function)
4337 do_warn_unused_parameter (current_function_decl);
4339 /* End any sequences that failed to be closed due to syntax errors. */
4340 while (in_sequence_p ())
4341 end_sequence ();
4343 clear_pending_stack_adjust ();
4344 do_pending_stack_adjust ();
4346 /* Output a line number for the end of the function.
4347 SDB depends on this. */
4348 force_next_line_note ();
4349 emit_line_note (input_location);
4351 /* Before the return label (if any), clobber the return
4352 registers so that they are not propagated live to the rest of
4353 the function. This can only happen with functions that drop
4354 through; if there had been a return statement, there would
4355 have either been a return rtx, or a jump to the return label.
4357 We delay actual code generation until after current_function_return_rtx
4358 is computed. */
4359 clobber_after = get_last_insn ();
4361 /* Output the label for the actual return from the function. */
4362 emit_label (return_label);
4364 if (USING_SJLJ_EXCEPTIONS)
4366 /* Let except.c know where it should emit the call to unregister
4367 the function context for sjlj exceptions. */
4368 if (flag_exceptions)
4369 sjlj_emit_function_exit_after (get_last_insn ());
4371 else
4373 /* @@@ This is a kludge. We want to ensure that instructions that
4374 may trap are not moved into the epilogue by scheduling, because
4375 we don't always emit unwind information for the epilogue.
4376 However, not all machine descriptions define a blockage insn, so
4377 emit an ASM_INPUT to act as one. */
4378 if (flag_non_call_exceptions)
4379 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4382 /* If this is an implementation of throw, do what's necessary to
4383 communicate between __builtin_eh_return and the epilogue. */
4384 expand_eh_return ();
4386 /* If scalar return value was computed in a pseudo-reg, or was a named
4387 return value that got dumped to the stack, copy that to the hard
4388 return register. */
4389 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4391 tree decl_result = DECL_RESULT (current_function_decl);
4392 rtx decl_rtl = DECL_RTL (decl_result);
4394 if (REG_P (decl_rtl)
4395 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4396 : DECL_REGISTER (decl_result))
4398 rtx real_decl_rtl = current_function_return_rtx;
4400 /* This should be set in assign_parms. */
4401 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4403 /* If this is a BLKmode structure being returned in registers,
4404 then use the mode computed in expand_return. Note that if
4405 decl_rtl is memory, then its mode may have been changed,
4406 but that current_function_return_rtx has not. */
4407 if (GET_MODE (real_decl_rtl) == BLKmode)
4408 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4410 /* If a non-BLKmode return value should be padded at the least
4411 significant end of the register, shift it left by the appropriate
4412 amount. BLKmode results are handled using the group load/store
4413 machinery. */
4414 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4415 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4417 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4418 REGNO (real_decl_rtl)),
4419 decl_rtl);
4420 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4422 /* If a named return value dumped decl_return to memory, then
4423 we may need to re-do the PROMOTE_MODE signed/unsigned
4424 extension. */
4425 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4427 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4429 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4430 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4431 &unsignedp, 1);
4433 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4435 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4437 /* If expand_function_start has created a PARALLEL for decl_rtl,
4438 move the result to the real return registers. Otherwise, do
4439 a group load from decl_rtl for a named return. */
4440 if (GET_CODE (decl_rtl) == PARALLEL)
4441 emit_group_move (real_decl_rtl, decl_rtl);
4442 else
4443 emit_group_load (real_decl_rtl, decl_rtl,
4444 TREE_TYPE (decl_result),
4445 int_size_in_bytes (TREE_TYPE (decl_result)));
4447 /* In the case of complex integer modes smaller than a word, we'll
4448 need to generate some non-trivial bitfield insertions. Do that
4449 on a pseudo and not the hard register. */
4450 else if (GET_CODE (decl_rtl) == CONCAT
4451 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4452 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4454 int old_generating_concat_p;
4455 rtx tmp;
4457 old_generating_concat_p = generating_concat_p;
4458 generating_concat_p = 0;
4459 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4460 generating_concat_p = old_generating_concat_p;
4462 emit_move_insn (tmp, decl_rtl);
4463 emit_move_insn (real_decl_rtl, tmp);
4465 else
4466 emit_move_insn (real_decl_rtl, decl_rtl);
4470 /* If returning a structure, arrange to return the address of the value
4471 in a place where debuggers expect to find it.
4473 If returning a structure PCC style,
4474 the caller also depends on this value.
4475 And current_function_returns_pcc_struct is not necessarily set. */
4476 if (current_function_returns_struct
4477 || current_function_returns_pcc_struct)
4479 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4480 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4481 rtx outgoing;
4483 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4484 type = TREE_TYPE (type);
4485 else
4486 value_address = XEXP (value_address, 0);
4488 outgoing = targetm.calls.function_value (build_pointer_type (type),
4489 current_function_decl, true);
4491 /* Mark this as a function return value so integrate will delete the
4492 assignment and USE below when inlining this function. */
4493 REG_FUNCTION_VALUE_P (outgoing) = 1;
4495 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4496 value_address = convert_memory_address (GET_MODE (outgoing),
4497 value_address);
4499 emit_move_insn (outgoing, value_address);
4501 /* Show the return register used to hold the result (in this case the
4502 address of the result). */
4503 current_function_return_rtx = outgoing;
4506 /* Emit the actual code to clobber return register. */
4508 rtx seq;
4510 start_sequence ();
4511 clobber_return_register ();
4512 expand_naked_return ();
4513 seq = get_insns ();
4514 end_sequence ();
4516 emit_insn_after (seq, clobber_after);
4519 /* Output the label for the naked return from the function. */
4520 emit_label (naked_return_label);
4522 /* If stack protection is enabled for this function, check the guard. */
4523 if (cfun->stack_protect_guard)
4524 stack_protect_epilogue ();
4526 /* If we had calls to alloca, and this machine needs
4527 an accurate stack pointer to exit the function,
4528 insert some code to save and restore the stack pointer. */
4529 if (! EXIT_IGNORE_STACK
4530 && current_function_calls_alloca)
4532 rtx tem = 0;
4534 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4535 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4538 /* ??? This should no longer be necessary since stupid is no longer with
4539 us, but there are some parts of the compiler (e.g. reload_combine and
4540 sh mach_dep_reorg) that still try to compute their own lifetime info
4541 instead of using the general framework. */
4542 use_return_register ();
4546 get_arg_pointer_save_area (struct function *f)
4548 rtx ret = f->x_arg_pointer_save_area;
4550 if (! ret)
4552 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4553 f->x_arg_pointer_save_area = ret;
4556 if (f == cfun && ! f->arg_pointer_save_area_init)
4558 rtx seq;
4560 /* Save the arg pointer at the beginning of the function. The
4561 generated stack slot may not be a valid memory address, so we
4562 have to check it and fix it if necessary. */
4563 start_sequence ();
4564 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4565 seq = get_insns ();
4566 end_sequence ();
4568 push_topmost_sequence ();
4569 emit_insn_after (seq, entry_of_function ());
4570 pop_topmost_sequence ();
4573 return ret;
4576 /* Extend a vector that records the INSN_UIDs of INSNS
4577 (a list of one or more insns). */
4579 static void
4580 record_insns (rtx insns, VEC(int,heap) **vecp)
4582 rtx tmp;
4584 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4585 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4588 /* Set the locator of the insn chain starting at INSN to LOC. */
4589 static void
4590 set_insn_locators (rtx insn, int loc)
4592 while (insn != NULL_RTX)
4594 if (INSN_P (insn))
4595 INSN_LOCATOR (insn) = loc;
4596 insn = NEXT_INSN (insn);
4600 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4601 be running after reorg, SEQUENCE rtl is possible. */
4603 static int
4604 contains (rtx insn, VEC(int,heap) **vec)
4606 int i, j;
4608 if (NONJUMP_INSN_P (insn)
4609 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4611 int count = 0;
4612 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4613 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4614 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4615 == VEC_index (int, *vec, j))
4616 count++;
4617 return count;
4619 else
4621 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4622 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4623 return 1;
4625 return 0;
4629 prologue_epilogue_contains (rtx insn)
4631 if (contains (insn, &prologue))
4632 return 1;
4633 if (contains (insn, &epilogue))
4634 return 1;
4635 return 0;
4639 sibcall_epilogue_contains (rtx insn)
4641 if (sibcall_epilogue)
4642 return contains (insn, &sibcall_epilogue);
4643 return 0;
4646 #ifdef HAVE_return
4647 /* Insert gen_return at the end of block BB. This also means updating
4648 block_for_insn appropriately. */
4650 static void
4651 emit_return_into_block (basic_block bb)
4653 emit_jump_insn_after (gen_return (), BB_END (bb));
4655 #endif /* HAVE_return */
4657 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4659 /* These functions convert the epilogue into a variant that does not
4660 modify the stack pointer. This is used in cases where a function
4661 returns an object whose size is not known until it is computed.
4662 The called function leaves the object on the stack, leaves the
4663 stack depressed, and returns a pointer to the object.
4665 What we need to do is track all modifications and references to the
4666 stack pointer, deleting the modifications and changing the
4667 references to point to the location the stack pointer would have
4668 pointed to had the modifications taken place.
4670 These functions need to be portable so we need to make as few
4671 assumptions about the epilogue as we can. However, the epilogue
4672 basically contains three things: instructions to reset the stack
4673 pointer, instructions to reload registers, possibly including the
4674 frame pointer, and an instruction to return to the caller.
4676 We must be sure of what a relevant epilogue insn is doing. We also
4677 make no attempt to validate the insns we make since if they are
4678 invalid, we probably can't do anything valid. The intent is that
4679 these routines get "smarter" as more and more machines start to use
4680 them and they try operating on different epilogues.
4682 We use the following structure to track what the part of the
4683 epilogue that we've already processed has done. We keep two copies
4684 of the SP equivalence, one for use during the insn we are
4685 processing and one for use in the next insn. The difference is
4686 because one part of a PARALLEL may adjust SP and the other may use
4687 it. */
4689 struct epi_info
4691 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4692 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4693 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4694 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4695 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4696 should be set to once we no longer need
4697 its value. */
4698 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4699 for registers. */
4702 static void handle_epilogue_set (rtx, struct epi_info *);
4703 static void update_epilogue_consts (rtx, rtx, void *);
4704 static void emit_equiv_load (struct epi_info *);
4706 /* Modify INSNS, a list of one or more insns that is part of the epilogue, so
4707 that it makes no modifications to the stack pointer. Return the new list of insns. */
4709 static rtx
4710 keep_stack_depressed (rtx insns)
4712 int j;
4713 struct epi_info info;
4714 rtx insn, next;
4716 /* If the epilogue is just a single instruction, it must be OK as is. */
4717 if (NEXT_INSN (insns) == NULL_RTX)
4718 return insns;
4720 /* Otherwise, start a sequence, initialize the information we have, and
4721 process all the insns we were given. */
4722 start_sequence ();
4724 info.sp_equiv_reg = stack_pointer_rtx;
4725 info.sp_offset = 0;
4726 info.equiv_reg_src = 0;
4728 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4729 info.const_equiv[j] = 0;
4731 insn = insns;
4732 next = NULL_RTX;
4733 while (insn != NULL_RTX)
4735 next = NEXT_INSN (insn);
4737 if (!INSN_P (insn))
4739 add_insn (insn);
4740 insn = next;
4741 continue;
4744 /* If this insn references the register that SP is equivalent to and
4745 we have a pending load to that register, we must force out the load
4746 first and then indicate we no longer know what SP's equivalent is. */
4747 if (info.equiv_reg_src != 0
4748 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4750 emit_equiv_load (&info);
4751 info.sp_equiv_reg = 0;
4754 info.new_sp_equiv_reg = info.sp_equiv_reg;
4755 info.new_sp_offset = info.sp_offset;
4757 /* If this is a (RETURN) and the return address is on the stack,
4758 update the address and change to an indirect jump. */
4759 if (GET_CODE (PATTERN (insn)) == RETURN
4760 || (GET_CODE (PATTERN (insn)) == PARALLEL
4761 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4763 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4764 rtx base = 0;
4765 HOST_WIDE_INT offset = 0;
4766 rtx jump_insn, jump_set;
4768 /* If the return address is in a register, we can emit the insn
4769 unchanged. Otherwise, it must be a MEM and we see what the
4770 base register and offset are. In any case, we have to emit any
4771 pending load to the equivalent reg of SP, if any. */
4772 if (REG_P (retaddr))
4774 emit_equiv_load (&info);
4775 add_insn (insn);
4776 insn = next;
4777 continue;
4779 else
4781 rtx ret_ptr;
4782 gcc_assert (MEM_P (retaddr));
4784 ret_ptr = XEXP (retaddr, 0);
4786 if (REG_P (ret_ptr))
4788 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4789 offset = 0;
4791 else
4793 gcc_assert (GET_CODE (ret_ptr) == PLUS
4794 && REG_P (XEXP (ret_ptr, 0))
4795 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4796 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4797 offset = INTVAL (XEXP (ret_ptr, 1));
4801 /* If the base of the location containing the return pointer
4802 is SP, we must update it with the replacement address. Otherwise,
4803 just build the necessary MEM. */
4804 retaddr = plus_constant (base, offset);
4805 if (base == stack_pointer_rtx)
4806 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4807 plus_constant (info.sp_equiv_reg,
4808 info.sp_offset));
4810 retaddr = gen_rtx_MEM (Pmode, retaddr);
4811 MEM_NOTRAP_P (retaddr) = 1;
4813 /* If there is a pending load to the equivalent register for SP
4814 and we reference that register, we must load our address into
4815 a scratch register and then do that load. */
4816 if (info.equiv_reg_src
4817 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4819 unsigned int regno;
4820 rtx reg;
4822 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4823 if (HARD_REGNO_MODE_OK (regno, Pmode)
4824 && !fixed_regs[regno]
4825 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4826 && !REGNO_REG_SET_P
4827 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4828 && !refers_to_regno_p (regno,
4829 regno + hard_regno_nregs[regno]
4830 [Pmode],
4831 info.equiv_reg_src, NULL)
4832 && info.const_equiv[regno] == 0)
4833 break;
4835 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4837 reg = gen_rtx_REG (Pmode, regno);
4838 emit_move_insn (reg, retaddr);
4839 retaddr = reg;
4842 emit_equiv_load (&info);
4843 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4845 /* Show that the SET in the above insn is a RETURN. */
4846 jump_set = single_set (jump_insn);
4847 gcc_assert (jump_set);
4848 SET_IS_RETURN_P (jump_set) = 1;
4851 /* If SP is not mentioned in the pattern and its equivalent register, if
4852 any, is not modified, just emit it. Otherwise, if neither is set,
4853 replace the reference to SP and emit the insn. If none of those are
4854 true, handle each SET individually. */
4855 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4856 && (info.sp_equiv_reg == stack_pointer_rtx
4857 || !reg_set_p (info.sp_equiv_reg, insn)))
4858 add_insn (insn);
4859 else if (! reg_set_p (stack_pointer_rtx, insn)
4860 && (info.sp_equiv_reg == stack_pointer_rtx
4861 || !reg_set_p (info.sp_equiv_reg, insn)))
4863 int changed;
4865 changed = validate_replace_rtx (stack_pointer_rtx,
4866 plus_constant (info.sp_equiv_reg,
4867 info.sp_offset),
4868 insn);
4869 gcc_assert (changed);
4871 add_insn (insn);
4873 else if (GET_CODE (PATTERN (insn)) == SET)
4874 handle_epilogue_set (PATTERN (insn), &info);
4875 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4877 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4878 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4879 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4881 else
4882 add_insn (insn);
4884 info.sp_equiv_reg = info.new_sp_equiv_reg;
4885 info.sp_offset = info.new_sp_offset;
4887 /* Now update any constants this insn sets. */
4888 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4889 insn = next;
4892 insns = get_insns ();
4893 end_sequence ();
4894 return insns;
4897 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4898 structure that contains information about what we've seen so far. We
4899 process this SET by either updating that data or by emitting one or
4900 more insns. */
4902 static void
4903 handle_epilogue_set (rtx set, struct epi_info *p)
4905 /* First handle the case where we are setting SP. Record what it is being
4906 set from, which we must be able to determine. */
4907 if (reg_set_p (stack_pointer_rtx, set))
4909 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4911 if (GET_CODE (SET_SRC (set)) == PLUS)
4913 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4914 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4915 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4916 else
4918 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4919 && (REGNO (XEXP (SET_SRC (set), 1))
4920 < FIRST_PSEUDO_REGISTER)
4921 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4922 p->new_sp_offset
4923 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4926 else
4927 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4929 /* If we are adjusting SP, we adjust from the old data. */
4930 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4932 p->new_sp_equiv_reg = p->sp_equiv_reg;
4933 p->new_sp_offset += p->sp_offset;
4936 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4938 return;
4941 /* Next handle the case where we are setting SP's equivalent
4942 register. We must not already have a value to set it to. We
4943 could update, but there seems little point in handling that case.
4944 Note that we have to allow for the case where we are setting the
4945 register set in the previous part of a PARALLEL inside a single
4946 insn. But use the old offset for any updates within this insn.
4947 We must allow for the case where the register is being set in a
4948 different (usually wider) mode than Pmode. */
4949 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4951 gcc_assert (!p->equiv_reg_src
4952 && REG_P (p->new_sp_equiv_reg)
4953 && REG_P (SET_DEST (set))
4954 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4955 <= BITS_PER_WORD)
4956 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4957 p->equiv_reg_src
4958 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4959 plus_constant (p->sp_equiv_reg,
4960 p->sp_offset));
4963 /* Otherwise, replace any references to SP in the insn to its new value
4964 and emit the insn. */
4965 else
4967 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4968 plus_constant (p->sp_equiv_reg,
4969 p->sp_offset));
4970 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4971 plus_constant (p->sp_equiv_reg,
4972 p->sp_offset));
4973 emit_insn (set);
4977 /* Update the tracking information for registers set to constants. */
4979 static void
4980 update_epilogue_consts (rtx dest, rtx x, void *data)
4982 struct epi_info *p = (struct epi_info *) data;
4983 rtx new;
4985 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4986 return;
4988 /* If we are either clobbering a register or doing a partial set,
4989 show we don't know the value. */
4990 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
4991 p->const_equiv[REGNO (dest)] = 0;
4993 /* If we are setting it to a constant, record that constant. */
4994 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
4995 p->const_equiv[REGNO (dest)] = SET_SRC (x);
4997 /* If this is a binary operation between a register we have been tracking
4998 and a constant, see if we can compute a new constant value. */
4999 else if (ARITHMETIC_P (SET_SRC (x))
5000 && REG_P (XEXP (SET_SRC (x), 0))
5001 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5002 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5003 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5004 && 0 != (new = simplify_binary_operation
5005 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5006 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5007 XEXP (SET_SRC (x), 1)))
5008 && GET_CODE (new) == CONST_INT)
5009 p->const_equiv[REGNO (dest)] = new;
5011 /* Otherwise, we can't do anything with this value. */
5012 else
5013 p->const_equiv[REGNO (dest)] = 0;
5016 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5018 static void
5019 emit_equiv_load (struct epi_info *p)
5021 if (p->equiv_reg_src != 0)
5023 rtx dest = p->sp_equiv_reg;
5025 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5026 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5027 REGNO (p->sp_equiv_reg));
5029 emit_move_insn (dest, p->equiv_reg_src);
5030 p->equiv_reg_src = 0;
5033 #endif
5035 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5036 this into place with notes indicating where the prologue ends and where
5037 the epilogue begins. Update the basic block information when possible. */
5039 void
5040 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5042 int inserted = 0;
5043 edge e;
5044 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5045 rtx seq;
5046 #endif
5047 #ifdef HAVE_prologue
5048 rtx prologue_end = NULL_RTX;
5049 #endif
5050 #if defined (HAVE_epilogue) || defined(HAVE_return)
5051 rtx epilogue_end = NULL_RTX;
5052 #endif
5053 edge_iterator ei;
5055 #ifdef HAVE_prologue
5056 if (HAVE_prologue)
5058 start_sequence ();
5059 seq = gen_prologue ();
5060 emit_insn (seq);
5062 /* Retain a map of the prologue insns. */
5063 record_insns (seq, &prologue);
5064 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5066 seq = get_insns ();
5067 end_sequence ();
5068 set_insn_locators (seq, prologue_locator);
5070 /* Can't deal with multiple successors of the entry block
5071 at the moment. Function should always have at least one
5072 entry point. */
5073 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5075 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5076 inserted = 1;
5078 #endif
5080 /* If the exit block has no non-fake predecessors, we don't need
5081 an epilogue. */
5082 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5083 if ((e->flags & EDGE_FAKE) == 0)
5084 break;
5085 if (e == NULL)
5086 goto epilogue_done;
5088 #ifdef HAVE_return
5089 if (optimize && HAVE_return)
5091 /* If we're allowed to generate a simple return instruction,
5092 then by definition we don't need a full epilogue. Examine
5093 the block that falls through to EXIT. If it does not
5094 contain any code, examine its predecessors and try to
5095 emit (conditional) return instructions. */
5097 basic_block last;
5098 rtx label;
5100 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5101 if (e->flags & EDGE_FALLTHRU)
5102 break;
5103 if (e == NULL)
5104 goto epilogue_done;
5105 last = e->src;
5107 /* Verify that there are no active instructions in the last block. */
5108 label = BB_END (last);
5109 while (label && !LABEL_P (label))
5111 if (active_insn_p (label))
5112 break;
5113 label = PREV_INSN (label);
5116 if (BB_HEAD (last) == label && LABEL_P (label))
5118 edge_iterator ei2;
5120 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5122 basic_block bb = e->src;
5123 rtx jump;
5125 if (bb == ENTRY_BLOCK_PTR)
5127 ei_next (&ei2);
5128 continue;
5131 jump = BB_END (bb);
5132 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5134 ei_next (&ei2);
5135 continue;
5138 /* If we have an unconditional jump, we can replace that
5139 with a simple return instruction. */
5140 if (simplejump_p (jump))
5142 emit_return_into_block (bb);
5143 delete_insn (jump);
5146 /* If we have a conditional jump, we can try to replace
5147 that with a conditional return instruction. */
5148 else if (condjump_p (jump))
5150 if (! redirect_jump (jump, 0, 0))
5152 ei_next (&ei2);
5153 continue;
5156 /* If this block has only one successor, it both jumps
5157 and falls through to the fallthru block, so we can't
5158 delete the edge. */
5159 if (single_succ_p (bb))
5161 ei_next (&ei2);
5162 continue;
5165 else
5167 ei_next (&ei2);
5168 continue;
5171 /* Fix up the CFG for the successful change we just made. */
5172 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5175 /* Emit a return insn for the exit fallthru block. Whether
5176 this is still reachable will be determined later. */
5178 emit_barrier_after (BB_END (last));
5179 emit_return_into_block (last);
5180 epilogue_end = BB_END (last);
5181 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5182 goto epilogue_done;
5185 #endif
5186 /* Find the edge that falls through to EXIT. Other edges may exist
5187 due to RETURN instructions, but those don't need epilogues.
5188 There really shouldn't be a mixture -- either all should have
5189 been converted or none, however... */
5191 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5192 if (e->flags & EDGE_FALLTHRU)
5193 break;
5194 if (e == NULL)
5195 goto epilogue_done;
5197 #ifdef HAVE_epilogue
5198 if (HAVE_epilogue)
5200 start_sequence ();
5201 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5203 seq = gen_epilogue ();
5205 #ifdef INCOMING_RETURN_ADDR_RTX
5206 /* If this function returns with the stack depressed and we can support
5207 it, massage the epilogue to actually do that. */
5208 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5209 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5210 seq = keep_stack_depressed (seq);
5211 #endif
5213 emit_jump_insn (seq);
5215 /* Retain a map of the epilogue insns. */
5216 record_insns (seq, &epilogue);
5217 set_insn_locators (seq, epilogue_locator);
5219 seq = get_insns ();
5220 end_sequence ();
5222 insert_insn_on_edge (seq, e);
5223 inserted = 1;
5225 else
5226 #endif
5228 basic_block cur_bb;
5230 if (! next_active_insn (BB_END (e->src)))
5231 goto epilogue_done;
5232 /* We have a fall-through edge to the exit block, the source is not
5233 at the end of the function, and there will be an assembler epilogue
5234 at the end of the function.
5235 We can't use force_nonfallthru here, because that would try to
5236 use return. Inserting a jump 'by hand' is extremely messy, so
5237 we take advantage of cfg_layout_finalize using
5238 fixup_fallthru_exit_predecessor. */
5239 cfg_layout_initialize (0);
5240 FOR_EACH_BB (cur_bb)
5241 if (cur_bb->index >= NUM_FIXED_BLOCKS
5242 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5243 cur_bb->aux = cur_bb->next_bb;
5244 cfg_layout_finalize ();
5246 epilogue_done:
5248 if (inserted)
5249 commit_edge_insertions ();
5251 #ifdef HAVE_sibcall_epilogue
5252 /* Emit sibling epilogues before any sibling call sites. */
5253 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5255 basic_block bb = e->src;
5256 rtx insn = BB_END (bb);
5258 if (!CALL_P (insn)
5259 || ! SIBLING_CALL_P (insn))
5261 ei_next (&ei);
5262 continue;
5265 start_sequence ();
5266 emit_insn (gen_sibcall_epilogue ());
5267 seq = get_insns ();
5268 end_sequence ();
5270 /* Retain a map of the epilogue insns. Used in life analysis to
5271 avoid getting rid of sibcall epilogue insns. Do this before we
5272 actually emit the sequence. */
5273 record_insns (seq, &sibcall_epilogue);
5274 set_insn_locators (seq, epilogue_locator);
5276 emit_insn_before (seq, insn);
5277 ei_next (&ei);
5279 #endif
5281 #ifdef HAVE_epilogue
5282 if (epilogue_end)
5284 rtx insn, next;
5286 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
5287 epilogue, as those can be relevant for debug info generation.
5288 There is no need, however, to be quite so anal about the
5289 existence of such a note. */
5291 for (insn = epilogue_end; insn; insn = next)
5293 next = NEXT_INSN (insn);
5294 if (NOTE_P (insn)
5295 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG))
5296 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5299 #endif
5302 /* Reposition the prologue-end and epilogue-begin notes after instruction
5303 scheduling and delayed branch scheduling. */
5305 void
5306 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5308 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5309 rtx insn, last, note;
5310 int len;
5312 if ((len = VEC_length (int, prologue)) > 0)
5314 last = 0, note = 0;
5316 /* Scan from the beginning until we reach the last prologue insn.
5317 We apparently can't depend on basic_block_{head,end} after
5318 reorg has run. */
5319 for (insn = f; insn; insn = NEXT_INSN (insn))
5321 if (NOTE_P (insn))
5323 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5324 note = insn;
5326 else if (contains (insn, &prologue))
5328 last = insn;
5329 if (--len == 0)
5330 break;
5334 if (last)
5336 /* Find the prologue-end note if we haven't already, and
5337 move it to just after the last prologue insn. */
5338 if (note == 0)
5340 for (note = last; (note = NEXT_INSN (note));)
5341 if (NOTE_P (note)
5342 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5343 break;
5346 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5347 if (LABEL_P (last))
5348 last = NEXT_INSN (last);
5349 reorder_insns (note, note, last);
5353 if ((len = VEC_length (int, epilogue)) > 0)
5355 last = 0, note = 0;
5357 /* Scan from the end until we reach the first epilogue insn.
5358 We apparently can't depend on basic_block_{head,end} after
5359 reorg has run. */
5360 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5362 if (NOTE_P (insn))
5364 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5365 note = insn;
5367 else if (contains (insn, &epilogue))
5369 last = insn;
5370 if (--len == 0)
5371 break;
5375 if (last)
5377 /* Find the epilogue-begin note if we haven't already, and
5378 move it to just before the first epilogue insn. */
5379 if (note == 0)
5381 for (note = insn; (note = PREV_INSN (note));)
5382 if (NOTE_P (note)
5383 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5384 break;
5387 if (PREV_INSN (last) != note)
5388 reorder_insns (note, note, PREV_INSN (last));
5391 #endif /* HAVE_prologue or HAVE_epilogue */
5394 /* Resets the ib_boundaries_block vector. */
5396 void
5397 reset_block_changes (void)
5399 cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
5400 VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
5403 /* Record the boundary for BLOCK. */
5404 void
5405 record_block_change (tree block)
5407 int i, n;
5408 tree last_block;
5410 if (!block)
5411 return;
5413 if (!cfun->ib_boundaries_block)
5414 return;
5416 last_block = VEC_pop (tree, cfun->ib_boundaries_block);
5417 n = get_max_uid ();
5418 for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
5419 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
5421 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
5424 /* Finishes recording the block boundaries. */
5425 void
5426 finalize_block_changes (void)
5428 record_block_change (DECL_INITIAL (current_function_decl));
5431 /* For INSN, store in *BLOCK the BLOCK it belongs to. */
5432 void
5433 check_block_change (rtx insn, tree *block)
5435 unsigned uid = INSN_UID (insn);
5437 if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
5438 return;
5440 *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
5443 /* Releases the ib_boundaries_block records. */
5444 void
5445 free_block_changes (void)
5447 VEC_free (tree, gc, cfun->ib_boundaries_block);
5450 /* Returns the name of the current function. */
5451 const char *
5452 current_function_name (void)
5454 return lang_hooks.decl_printable_name (cfun->decl, 2);
5459 /* This recursive function finds and returns the CALL expression in X. */
5460 static rtx
5461 get_call (rtx x)
5463 int i;
5464 rtx call_rtx;
5465 const char *fmt;
5466 enum rtx_code code = GET_CODE (x);
5468 /* If X itself is the CALL, return it. */
5469 if (code == CALL)
5470 return x;
5472 fmt = GET_RTX_FORMAT (code);
5473 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5475 if (fmt [i] == 'e')
5477 if ((call_rtx = get_call (XEXP (x, i))) != NULL_RTX)
5478 return call_rtx;
5480 else if (fmt [i] == 'E')
5482 int j;
5484 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5485 if ((call_rtx = get_call (XVECEXP (x, i, j))) != NULL_RTX)
5486 return call_rtx;
5489 return NULL_RTX;
5492 /* Return in REGS the set of hard registers invalidated (if CLOBBERED_P)
5493 or used by the function called by INSN. */
5494 void
5495 get_call_invalidated_used_regs (rtx insn, HARD_REG_SET *regs, bool clobbered_p)
5497 rtx x;
5498 struct cgraph_node *node;
5499 tree decl = NULL;
5501 gcc_assert (CALL_P (insn));
5503 x = get_call (PATTERN (insn));
5504 if (x != NULL_RTX)
5506 x = XEXP (x, 0);
5507 gcc_assert (GET_CODE (x) == MEM);
5508 x = XEXP (x, 0);
5509 if (GET_CODE (x) == SYMBOL_REF)
5510 decl = SYMBOL_REF_DECL (x);
5511 if (decl != NULL && TREE_CODE (decl) != FUNCTION_DECL)
5512 decl = NULL;
5514 node = decl == NULL ? NULL : cgraph_node (decl);
5515 if (! flag_ira || ! flag_ipra || node == NULL
5516 /* This is a call of the function itself. We don't know the used
5517 registers yet, so take the worst case. */
5518 || node->decl == cfun->decl)
5520 if (clobbered_p)
5521 COPY_HARD_REG_SET (*regs, regs_invalidated_by_call);
5522 else
5523 COPY_HARD_REG_SET (*regs, call_used_reg_set);
5525 else
5527 COPY_HARD_REG_SET (*regs, node->function_used_regs);
5528 if (clobbered_p)
5529 AND_HARD_REG_SET (*regs, regs_invalidated_by_call);
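/* Record whether the current function uses only leaf registers, on targets
   that define LEAF_REGISTERS. */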
5535 static unsigned int
5536 rest_of_handle_check_leaf_regs (void)
5538 #ifdef LEAF_REGISTERS
5539 current_function_uses_only_leaf_regs
5540 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5541 #endif
5542 return 0;
5545 /* Insert TYPE into the used types hash table of FUNC. */
5546 static void
5547 used_types_insert_helper (tree type, struct function *func)
5549 if (type != NULL && func != NULL)
5551 void **slot;
5553 if (func->used_types_hash == NULL)
5554 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5555 htab_eq_pointer, NULL);
5556 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5557 if (*slot == NULL)
5558 *slot = type;
5562 /* Given a type, insert it into the used types hash table of cfun. */
5563 void
5564 used_types_insert (tree t)
5566 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5567 t = TREE_TYPE (t);
5568 t = TYPE_MAIN_VARIANT (t);
5569 if (debug_info_level > DINFO_LEVEL_NONE)
5570 used_types_insert_helper (t, cfun);
5573 struct tree_opt_pass pass_leaf_regs =
5575 NULL, /* name */
5576 NULL, /* gate */
5577 rest_of_handle_check_leaf_regs, /* execute */
5578 NULL, /* sub */
5579 NULL, /* next */
5580 0, /* static_pass_number */
5581 0, /* tv_id */
5582 0, /* properties_required */
5583 0, /* properties_provided */
5584 0, /* properties_destroyed */
5585 0, /* todo_flags_start */
5586 0, /* todo_flags_finish */
5587 0 /* letter */
5591 #include "gt-function.h"