/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "vecprim.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
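
/* A worked example of the rounding macros above, assuming ALIGN is a
   power of two (illustrative sketch, not part of the original source):

     FLOOR_ROUND (-13, 8) == -13 & ~7 == -16    rounds toward -infinity
     CEIL_ROUND  ( 13, 8) == (13 + 7) & ~7 == 16

   Masking instead of dividing keeps the result well defined when VALUE
   is negative, which happens whenever FRAME_GROWS_DOWNWARD makes frame
   offsets negative.  */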
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
  struct function *p;

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);

  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_stack_check_probe_note = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
  if (FRAME_GROWS_DOWNWARD)
    return -f->x_frame_offset;
  else
    return f->x_frame_offset;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    function->x_frame_offset -= size;

  /* Ignore alignment requests we cannot honor given the expected
     alignment of the stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
        function->x_frame_offset
          = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                          (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
      else
        function->x_frame_offset
          = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                         (unsigned HOST_WIDE_INT) alignment)
             + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    function->x_frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (x) = 1;

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (frame_offset_overflow (function->x_frame_offset, function->decl))
    function->x_frame_offset = 0;

  return x;
}

/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
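
/* A minimal usage sketch (illustrative only, not from the original
   source; SPILL_REG stands for some pseudo the caller already has).
   A pass needing a word of scratch stack space might write:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
     emit_move_insn (slot, spill_reg);

   ALIGN == 0 requests the natural alignment of SImode.  Until virtual
   register instantiation runs, the returned MEM is addressed off
   virtual_stack_vars_rtx rather than the hard frame pointer.  */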
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    {
      size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
      temp_slot_p *p;

      VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
      p = VEC_address (temp_slot_p, used_temp_slots);
      memset (&p[old_length], 0,
              sizeof (temp_slot_p) * (level + 1 - old_length));
    }

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
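
/* A minimal sketch of how expansion code typically uses these
   temporaries (illustrative only, not from the original source):

     push_temp_slots ();
     tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     ... emit code that stores a call result into TMP ...
     preserve_temp_slots (tmp);    only if TMP must outlive this level
     pop_temp_slots ();

   With KEEP == 0 the slot returns to avail_temp_slots at the next
   free_temp_slots / pop_temp_slots, ready to be reused by a later
   temporary whose type conflicts with the old one.  */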
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and the decl should be used in error messages.  In either case, we
   allocate storage of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     optimizing at high levels.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
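
/* The adjacency test above, in numbers (illustrative only): if slot P
   has base_offset 32 and full_size 16 while slot Q has base_offset 48,
   then P->base_offset + P->full_size == 48 == Q->base_offset, so Q is
   folded into P and P then covers offsets [32, 64) with full_size 32.
   full_size rather than size drives the test because it includes the
   alignment padding that makes the slots truly contiguous.  */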
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
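
/* The ({...}) case this function handles, in source terms (illustrative
   only, not from the original source):

     int x = ({ struct S s = f (); s.a; });

   The value of the statement expression may sit in the temp slot that
   holds S, so preserve_temp_slots moves that slot (or, when the slot
   cannot be identified, every non-kept slot at this level) up one
   nesting level, keeping it alive past the inner free_temp_slots.  */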
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
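
/* How the pieces above fit together (summary sketch, not from the
   original source): used_temp_slots is a VEC indexed by nesting level,
   each element heading a doubly-linked list of in-use slots, while
   avail_temp_slots is a single free list.  A slot therefore moves
   through:

     assign_stack_temp_for_type  ->  used_temp_slots[temp_slot_level]
     preserve_temp_slots         ->  used_temp_slots[level - 1]
     free/pop_temp_slots         ->  avail_temp_slots
                                     (via make_slot_available)  */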
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
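
/* For reference, the full mapping applied above; the offsets are the
   file-static variables computed at the start of
   instantiate_virtual_regs:

     virtual_incoming_args_rtx -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx    -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx           -> frame or arg pointer + cfa_offset,
                                  per FRAME_POINTER_CFA_OFFSET  */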
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
        {
          *loc = plus_constant (new, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
        {
          new = plus_constant (new, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of INSN.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new);
          if (x != new)
            emit_move_insn (new, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && GET_CODE (recog_data.operand[2]) == CONST_INT
          && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new = instantiate_new_reg (x, &offset);
          if (new == NULL)
            continue;
          if (offset == 0)
            x = new;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new = expand_simple_binop (GET_MODE (new), PLUS, new,
                                         GEN_INT (offset), NULL_RTX,
                                         1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new,
                                   GET_MODE (new), SUBREG_BYTE (x));
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        x = force_reg (insn_data[insn_code].operand[i].mode, x);

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = recog_data.operand[(unsigned)recog_data.dup_num[i]];

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

static void
instantiate_decl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl (XEXP (x, 0));
      instantiate_decl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
        instantiate_decl (DECL_RTL (t));
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl (DECL_RTL (decl));
      instantiate_decl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;

        instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}

struct tree_opt_pass pass_instantiate_virtual_regs =
{
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (tree exp, tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        fndecl = get_callee_fndecl (fntype);
        fntype = fndecl ? TREE_TYPE (fndecl) : 0;
        break;
      case FUNCTION_DECL:
        fndecl = fntype;
        fntype = TREE_TYPE (fndecl);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = 0;
        break;
      default:
        /* We don't expect other tree codes here.  */
        gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
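
/* A usage sketch (illustrative only, not from the original source):
   deciding whether the current function returns its value in memory,
   exactly as assign_parms_augmented_arg_list does further below:

     if (aggregate_value_p (DECL_RESULT (current_function_decl),
                            current_function_decl))
       ... the return slot arrives as a hidden pointer argument ...  */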
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (tree decl)
{
  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  return (optimize || DECL_REGISTER (decl));
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
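
/* Concrete cases for the two checks above (illustrative only): a C++
   class with a nontrivial copy constructor is marked TREE_ADDRESSABLE
   by the front end and is always passed by reference, and so is a
   variable-length array parameter, whose TYPE_SIZE is not an
   INTEGER_CST.  Everything else is left to the target hook.  */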
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx conversion_insns;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
1922 /* A subroutine of assign_parms. Initialize ALL. */
1924 static void
1925 assign_parms_initialize_all (struct assign_parm_data_all *all)
1927 tree fntype;
1929 memset (all, 0, sizeof (*all));
1931 fntype = TREE_TYPE (current_function_decl);
1933 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1934 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1935 #else
1936 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1937 current_function_decl, -1);
1938 #endif
1940 #ifdef REG_PARM_STACK_SPACE
1941 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1942 #endif
1945 /* If ARGS contains entries with complex types, split each such entry
1946 into two entries of the component type. Return a new list if any
1947 substitutions were needed, else the old list. */
1949 static tree
1950 split_complex_args (tree args)
1952 tree p;
1954 /* Before allocating memory, check for the common case of no complex arguments. */
1955 for (p = args; p; p = TREE_CHAIN (p))
1957 tree type = TREE_TYPE (p);
1958 if (TREE_CODE (type) == COMPLEX_TYPE
1959 && targetm.calls.split_complex_arg (type))
1960 goto found;
1962 return args;
1964 found:
1965 args = copy_list (args);
1967 for (p = args; p; p = TREE_CHAIN (p))
1969 tree type = TREE_TYPE (p);
1970 if (TREE_CODE (type) == COMPLEX_TYPE
1971 && targetm.calls.split_complex_arg (type))
1973 tree decl;
1974 tree subtype = TREE_TYPE (type);
1975 bool addressable = TREE_ADDRESSABLE (p);
1977 /* Rewrite the PARM_DECL's type with its component. */
1978 TREE_TYPE (p) = subtype;
1979 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1980 DECL_MODE (p) = VOIDmode;
1981 DECL_SIZE (p) = NULL;
1982 DECL_SIZE_UNIT (p) = NULL;
1983 /* If this arg must go in memory, put it in a pseudo here.
1984 We can't allow it to go in memory as per normal parms,
1985 because the usual place might not have the imag part
1986 adjacent to the real part. */
1987 DECL_ARTIFICIAL (p) = addressable;
1988 DECL_IGNORED_P (p) = addressable;
1989 TREE_ADDRESSABLE (p) = 0;
1990 layout_decl (p, 0);
1992 /* Build a second synthetic decl. */
1993 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1994 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1995 DECL_ARTIFICIAL (decl) = addressable;
1996 DECL_IGNORED_P (decl) = addressable;
1997 layout_decl (decl, 0);
1999 /* Splice it in; skip the new decl. */
2000 TREE_CHAIN (decl) = TREE_CHAIN (p);
2001 TREE_CHAIN (p) = decl;
2002 p = decl;
2006 return args;
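/* Illustration (informal): on a target whose split_complex_arg hook
   accepts complex doubles, a parameter declared '_Complex double z' is
   rewritten above into two adjacent 'double' PARM_DECLs carrying the
   real and imaginary parts, the second one synthetic.  */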
2009 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2010 the hidden struct return argument, and (ABI willing) complex args.
2011 Return the new parameter list. */
2013 static tree
2014 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2016 tree fndecl = current_function_decl;
2017 tree fntype = TREE_TYPE (fndecl);
2018 tree fnargs = DECL_ARGUMENTS (fndecl);
2020 /* If struct value address is treated as the first argument, make it so. */
2021 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2022 && ! current_function_returns_pcc_struct
2023 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2025 tree type = build_pointer_type (TREE_TYPE (fntype));
2026 tree decl;
2028 decl = build_decl (PARM_DECL, NULL_TREE, type);
2029 DECL_ARG_TYPE (decl) = type;
2030 DECL_ARTIFICIAL (decl) = 1;
2031 DECL_IGNORED_P (decl) = 1;
2033 TREE_CHAIN (decl) = fnargs;
2034 fnargs = decl;
2035 all->function_result_decl = decl;
2038 all->orig_fnargs = fnargs;
2040 /* If the target wants to split complex arguments into scalars, do so. */
2041 if (targetm.calls.split_complex_arg)
2042 fnargs = split_complex_args (fnargs);
2044 return fnargs;
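/* Illustration (informal): for 'struct S f (void)' where S is returned
   in memory and the target reports no dedicated struct-value register,
   a synthetic first parameter of type 'struct S *' is prepended above
   and remembered in ALL->function_result_decl.  */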
2047 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2048 data for the parameter. Incorporate ABI specifics such as pass-by-
2049 reference and type promotion. */
2051 static void
2052 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2053 struct assign_parm_data_one *data)
2055 tree nominal_type, passed_type;
2056 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2058 memset (data, 0, sizeof (*data));
2060 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2061 if (!current_function_stdarg)
2062 data->named_arg = 1; /* No variadic parms. */
2063 else if (TREE_CHAIN (parm))
2064 data->named_arg = 1; /* Not the last non-variadic parm. */
2065 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2066 data->named_arg = 1; /* Only variadic ones are unnamed. */
2067 else
2068 data->named_arg = 0; /* Treat as variadic. */
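/* Illustration (informal): in 'int f (int a, ...)', A is the last
   non-variadic parameter, so it is treated as unnamed here unless the
   target's strict_argument_naming hook says that only the true varargs
   are unnamed.  */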
2070 nominal_type = TREE_TYPE (parm);
2071 passed_type = DECL_ARG_TYPE (parm);
2073 /* Look out for errors propagating this far. Also, if the parameter's
2074 type is void then its value doesn't matter. */
2075 if (TREE_TYPE (parm) == error_mark_node
2076 /* This can happen after weird syntax errors
2077 or if an enum type is defined among the parms. */
2078 || TREE_CODE (parm) != PARM_DECL
2079 || passed_type == NULL
2080 || VOID_TYPE_P (nominal_type))
2082 nominal_type = passed_type = void_type_node;
2083 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2084 goto egress;
2087 /* Find mode of arg as it is passed, and mode of arg as it should be
2088 during execution of this function. */
2089 passed_mode = TYPE_MODE (passed_type);
2090 nominal_mode = TYPE_MODE (nominal_type);
2092 /* If the parm is to be passed as a transparent union, use the type of
2093 the first field for the tests below. We have already verified that
2094 the modes are the same. */
2095 if (TREE_CODE (passed_type) == UNION_TYPE
2096 && TYPE_TRANSPARENT_UNION (passed_type))
2097 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
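/* Illustration (informal): given 'union u { int i; unsigned u; }
   __attribute__ ((transparent_union))', an argument of type 'union u'
   is passed exactly as its first member, an int, so the int type is
   used from here on.  */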
2099 /* See if this arg was passed by invisible reference. */
2100 if (pass_by_reference (&all->args_so_far, passed_mode,
2101 passed_type, data->named_arg))
2103 passed_type = nominal_type = build_pointer_type (passed_type);
2104 data->passed_pointer = true;
2105 passed_mode = nominal_mode = Pmode;
2108 /* Find mode as it is passed by the ABI. */
2109 promoted_mode = passed_mode;
2110 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2112 int unsignedp = TYPE_UNSIGNED (passed_type);
2113 promoted_mode = promote_mode (passed_type, promoted_mode,
2114 &unsignedp, 1);
2117 egress:
2118 data->nominal_type = nominal_type;
2119 data->passed_type = passed_type;
2120 data->nominal_mode = nominal_mode;
2121 data->passed_mode = passed_mode;
2122 data->promoted_mode = promoted_mode;
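/* Illustration (assuming a target that promotes function args and
   widens sub-word integers): a 'short' parameter ends up with
   nominal_mode == passed_mode == HImode but promoted_mode == SImode,
   i.e. the caller hands over a sign-extended SImode value.  */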
2125 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2127 static void
2128 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2129 struct assign_parm_data_one *data, bool no_rtl)
2131 int varargs_pretend_bytes = 0;
2133 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2134 data->promoted_mode,
2135 data->passed_type,
2136 &varargs_pretend_bytes, no_rtl);
2138 /* If the back-end has requested extra stack space, record how much is
2139 needed. Do not change pretend_args_size otherwise since it may be
2140 nonzero from an earlier partial argument. */
2141 if (varargs_pretend_bytes > 0)
2142 all->pretend_args_size = varargs_pretend_bytes;
2145 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2146 the incoming location of the current parameter. */
2148 static void
2149 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2150 struct assign_parm_data_one *data)
2152 HOST_WIDE_INT pretend_bytes = 0;
2153 rtx entry_parm;
2154 bool in_regs;
2156 if (data->promoted_mode == VOIDmode)
2158 data->entry_parm = data->stack_parm = const0_rtx;
2159 return;
2162 #ifdef FUNCTION_INCOMING_ARG
2163 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2164 data->passed_type, data->named_arg);
2165 #else
2166 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2167 data->passed_type, data->named_arg);
2168 #endif
2170 if (entry_parm == 0)
2171 data->promoted_mode = data->passed_mode;
2173 /* Determine parm's home in the stack, in case it arrives in the stack
2174 or we should pretend it did. Compute the stack position and rtx where
2175 the argument arrives and its size.
2177 There is one complexity here: If this was a parameter that would
2178 have been passed in registers, but wasn't only because it is
2179 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2180 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2181 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2182 as it was the previous time. */
2183 in_regs = entry_parm != 0;
2184 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2185 in_regs = true;
2186 #endif
2187 if (!in_regs && !data->named_arg)
2189 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2191 rtx tem;
2192 #ifdef FUNCTION_INCOMING_ARG
2193 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2194 data->passed_type, true);
2195 #else
2196 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2197 data->passed_type, true);
2198 #endif
2199 in_regs = tem != NULL;
2203 /* If this parameter was passed both in registers and in the stack, use
2204 the copy on the stack. */
2205 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2206 data->passed_type))
2207 entry_parm = 0;
2209 if (entry_parm)
2211 int partial;
2213 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2214 data->promoted_mode,
2215 data->passed_type,
2216 data->named_arg);
2217 data->partial = partial;
2219 /* The caller might already have allocated stack space for the
2220 register parameters. */
2221 if (partial != 0 && all->reg_parm_stack_space == 0)
2223 /* Part of this argument is passed in registers and part
2224 is passed on the stack. Ask the prologue code to extend
2225 the stack part so that we can recreate the full value.
2227 PRETEND_BYTES is the size of the registers we need to store.
2228 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2229 stack space that the prologue should allocate.
2231 Internally, gcc assumes that the argument pointer is aligned
2232 to STACK_BOUNDARY bits. This is used both for alignment
2233 optimizations (see init_emit) and to locate arguments that are
2234 aligned to more than PARM_BOUNDARY bits. We must preserve this
2235 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2236 a stack boundary. */
2238 /* We assume at most one partial arg, and it must be the first
2239 argument on the stack. */
2240 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2242 pretend_bytes = partial;
2243 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2245 /* We want to align relative to the actual stack pointer, so
2246 don't include this in the stack size until later. */
2247 all->extra_pretend_bytes = all->pretend_args_size;
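/* Worked example (values assumed): if the first 8 bytes of an argument
   arrive in registers and STACK_BYTES is 16, then pretend_bytes == 8
   and CEIL_ROUND (8, 16) == 16, so the prologue is asked to allocate
   16 bytes, keeping the argument pointer STACK_BOUNDARY-aligned.  */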
2251 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2252 entry_parm ? data->partial : 0, current_function_decl,
2253 &all->stack_args_size, &data->locate);
2255 /* Adjust offsets to include the pretend args. */
2256 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2257 data->locate.slot_offset.constant += pretend_bytes;
2258 data->locate.offset.constant += pretend_bytes;
2260 data->entry_parm = entry_parm;
2263 /* A subroutine of assign_parms. If there is actually space on the stack
2264 for this parm, count it in stack_args_size and return true. */
2266 static bool
2267 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2268 struct assign_parm_data_one *data)
2270 /* Trivially true if we've no incoming register. */
2271 if (data->entry_parm == NULL)
2273 /* Also true if we're partially in registers and partially not,
2274 since we've arranged to drop the entire argument on the stack. */
2275 else if (data->partial != 0)
2277 /* Also true if the target says that it's passed in both registers
2278 and on the stack. */
2279 else if (GET_CODE (data->entry_parm) == PARALLEL
2280 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2282 /* Also true if the target says that there's stack allocated for
2283 all register parameters. */
2284 else if (all->reg_parm_stack_space > 0)
2286 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2287 else
2288 return false;
2290 all->stack_args_size.constant += data->locate.size.constant;
2291 if (data->locate.size.var)
2292 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2294 return true;
2297 /* A subroutine of assign_parms. Given that this parameter is allocated
2298 stack space by the ABI, find it. */
2300 static void
2301 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2303 rtx offset_rtx, stack_parm;
2304 unsigned int align, boundary;
2306 /* If we're passing this arg using a reg, make its stack home the
2307 aligned stack slot. */
2308 if (data->entry_parm)
2309 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2310 else
2311 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2313 stack_parm = current_function_internal_arg_pointer;
2314 if (offset_rtx != const0_rtx)
2315 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2316 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2318 set_mem_attributes (stack_parm, parm, 1);
2320 boundary = data->locate.boundary;
2321 align = BITS_PER_UNIT;
2323 /* If we're padding upward, we know that the alignment of the slot
2324 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2325 intentionally forcing upward padding. Otherwise we have to come
2326 up with a guess at the alignment based on OFFSET_RTX. */
2327 if (data->locate.where_pad != downward || data->entry_parm)
2328 align = boundary;
2329 else if (GET_CODE (offset_rtx) == CONST_INT)
2331 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2332 align = align & -align;
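/* Worked example (values assumed): with a 4-byte offset and a 64-bit
   boundary, 32 | 64 == 96 and 96 & -96 == 32; the lowest set bit of
   the OR is the weaker of the offset's and the boundary's alignment,
   which is all that can be guaranteed here.  */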
2334 set_mem_align (stack_parm, align);
2336 if (data->entry_parm)
2337 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2339 data->stack_parm = stack_parm;
2342 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2343 always valid and contiguous. */
2345 static void
2346 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2348 rtx entry_parm = data->entry_parm;
2349 rtx stack_parm = data->stack_parm;
2351 /* If this parm was passed part in regs and part in memory, pretend it
2352 arrived entirely in memory by pushing the register-part onto the stack.
2353 In the special case of a DImode or DFmode that is split, we could put
2354 it together in a pseudoreg directly, but for now that's not worth
2355 bothering with. */
2356 if (data->partial != 0)
2358 /* Handle calls that pass values in multiple non-contiguous
2359 locations. The Irix 6 ABI has examples of this. */
2360 if (GET_CODE (entry_parm) == PARALLEL)
2361 emit_group_store (validize_mem (stack_parm), entry_parm,
2362 data->passed_type,
2363 int_size_in_bytes (data->passed_type));
2364 else
2366 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2367 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2368 data->partial / UNITS_PER_WORD);
2371 entry_parm = stack_parm;
2374 /* If we didn't decide this parm came in a register, by default it came
2375 on the stack. */
2376 else if (entry_parm == NULL)
2377 entry_parm = stack_parm;
2379 /* When an argument is passed in multiple locations, we can't make use
2380 of this information, but we can save some copying if the whole argument
2381 is passed in a single register. */
2382 else if (GET_CODE (entry_parm) == PARALLEL
2383 && data->nominal_mode != BLKmode
2384 && data->passed_mode != BLKmode)
2386 size_t i, len = XVECLEN (entry_parm, 0);
2388 for (i = 0; i < len; i++)
2389 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2390 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2391 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2392 == data->passed_mode)
2393 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2395 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2396 break;
2400 data->entry_parm = entry_parm;
2403 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2404 always valid and properly aligned. */
2406 static void
2407 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2409 rtx stack_parm = data->stack_parm;
2411 /* If we can't trust the parm stack slot to be aligned enough for its
2412 ultimate type, don't use that slot after entry. We'll make another
2413 stack slot, if we need one. */
2414 if (stack_parm
2415 && ((STRICT_ALIGNMENT
2416 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2417 || (data->nominal_type
2418 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2419 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2420 stack_parm = NULL;
2422 /* If parm was passed in memory, and we need to convert it on entry,
2423 don't store it back in that same slot. */
2424 else if (data->entry_parm == stack_parm
2425 && data->nominal_mode != BLKmode
2426 && data->nominal_mode != data->passed_mode)
2427 stack_parm = NULL;
2429 /* If stack protection is in effect for this function, don't leave any
2430 pointers in their passed stack slots. */
2431 else if (cfun->stack_protect_guard
2432 && (flag_stack_protect == 2
2433 || data->passed_pointer
2434 || POINTER_TYPE_P (data->nominal_type)))
2435 stack_parm = NULL;
2437 data->stack_parm = stack_parm;
2440 /* A subroutine of assign_parms. Return true if the current parameter
2441 should be stored in BLKmode in the current frame. */
2443 static bool
2444 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2446 if (data->nominal_mode == BLKmode)
2447 return true;
2448 if (GET_CODE (data->entry_parm) == PARALLEL)
2449 return true;
2451 #ifdef BLOCK_REG_PADDING
2452 /* Only assign_parm_setup_block knows how to deal with register arguments
2453 that are padded at the least significant end. */
2454 if (REG_P (data->entry_parm)
2455 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2456 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2457 == (BYTES_BIG_ENDIAN ? upward : downward)))
2458 return true;
2459 #endif
2461 return false;
2464 /* A subroutine of assign_parms. Arrange for the parameter to be
2465 present and valid in DATA->STACK_RTL. */
2467 static void
2468 assign_parm_setup_block (struct assign_parm_data_all *all,
2469 tree parm, struct assign_parm_data_one *data)
2471 rtx entry_parm = data->entry_parm;
2472 rtx stack_parm = data->stack_parm;
2473 HOST_WIDE_INT size;
2474 HOST_WIDE_INT size_stored;
2475 rtx orig_entry_parm = entry_parm;
2477 if (GET_CODE (entry_parm) == PARALLEL)
2478 entry_parm = emit_group_move_into_temps (entry_parm);
2480 /* If we've a non-block object that's nevertheless passed in parts,
2481 reconstitute it in register operations rather than on the stack. */
2482 if (GET_CODE (entry_parm) == PARALLEL
2483 && data->nominal_mode != BLKmode)
2485 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2487 if ((XVECLEN (entry_parm, 0) > 1
2488 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2489 && use_register_for_decl (parm))
2491 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2493 push_to_sequence (all->conversion_insns);
2495 /* For values returned in multiple registers, handle possible
2496 incompatible calls to emit_group_store.
2498 For example, the following would be invalid, and would have to
2499 be fixed by the conditional below:
2501 emit_group_store ((reg:SF), (parallel:DF))
2502 emit_group_store ((reg:SI), (parallel:DI))
2504 An example of this are doubles in e500 v2:
2505 (parallel:DF (expr_list (reg:SI) (const_int 0))
2506 (expr_list (reg:SI) (const_int 4))). */
2507 if (data->nominal_mode != data->passed_mode)
2509 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2510 emit_group_store (t, entry_parm, NULL_TREE,
2511 GET_MODE_SIZE (GET_MODE (entry_parm)));
2512 convert_move (parmreg, t, 0);
2514 else
2515 emit_group_store (parmreg, entry_parm, data->nominal_type,
2516 int_size_in_bytes (data->nominal_type));
2518 all->conversion_insns = get_insns ();
2519 end_sequence ();
2521 SET_DECL_RTL (parm, parmreg);
2522 return;
2526 size = int_size_in_bytes (data->passed_type);
2527 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2528 if (stack_parm == 0)
2530 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2531 stack_parm = assign_stack_local (BLKmode, size_stored,
2532 DECL_ALIGN (parm));
2533 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2534 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2535 set_mem_attributes (stack_parm, parm, 1);
2538 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2539 calls that pass values in multiple non-contiguous locations. */
2540 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2542 rtx mem;
2544 /* Note that we will be storing an integral number of words.
2545 So we have to be careful to ensure that we allocate an
2546 integral number of words. We do this above when we call
2547 assign_stack_local if space was not allocated in the argument
2548 list. If it was, this will not work if PARM_BOUNDARY is not
2549 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2550 if it becomes a problem. The exception is when BLKmode arrives
2551 with arguments not conforming to word_mode. */
2553 if (data->stack_parm == 0)
2554 ;
2555 else if (GET_CODE (entry_parm) == PARALLEL)
2556 ;
2557 else
2558 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2560 mem = validize_mem (stack_parm);
2562 /* Handle values in multiple non-contiguous locations. */
2563 if (GET_CODE (entry_parm) == PARALLEL)
2565 push_to_sequence (all->conversion_insns);
2566 emit_group_store (mem, entry_parm, data->passed_type, size);
2567 all->conversion_insns = get_insns ();
2568 end_sequence ();
2571 else if (size == 0)
2572 ;
2574 /* If SIZE is that of a mode no bigger than a word, just use
2575 that mode's store operation. */
2576 else if (size <= UNITS_PER_WORD)
2578 enum machine_mode mode
2579 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2581 if (mode != BLKmode
2582 #ifdef BLOCK_REG_PADDING
2583 && (size == UNITS_PER_WORD
2584 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2585 != (BYTES_BIG_ENDIAN ? upward : downward)))
2586 #endif
2589 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2590 emit_move_insn (change_address (mem, mode, 0), reg);
2593 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2594 machine must be aligned to the left before storing
2595 to memory. Note that the previous test doesn't
2596 handle all cases (e.g. SIZE == 3). */
2597 else if (size != UNITS_PER_WORD
2598 #ifdef BLOCK_REG_PADDING
2599 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2600 == downward)
2601 #else
2602 && BYTES_BIG_ENDIAN
2603 #endif
2606 rtx tem, x;
2607 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2608 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2610 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2611 build_int_cst (NULL_TREE, by),
2612 NULL_RTX, 1);
2613 tem = change_address (mem, word_mode, 0);
2614 emit_move_insn (tem, x);
2616 else
2617 move_block_from_reg (REGNO (entry_parm), mem,
2618 size_stored / UNITS_PER_WORD);
2620 else
2621 move_block_from_reg (REGNO (entry_parm), mem,
2622 size_stored / UNITS_PER_WORD);
2624 else if (data->stack_parm == 0)
2626 push_to_sequence (all->conversion_insns);
2627 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2628 BLOCK_OP_NORMAL);
2629 all->conversion_insns = get_insns ();
2630 end_sequence ();
2633 data->stack_parm = stack_parm;
2634 SET_DECL_RTL (parm, stack_parm);
2637 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2638 parameter. Get it there. Perform all ABI specified conversions. */
2640 static void
2641 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2642 struct assign_parm_data_one *data)
2644 rtx parmreg;
2645 enum machine_mode promoted_nominal_mode;
2646 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2647 bool did_conversion = false;
2649 /* Store the parm in a pseudoregister during the function, but we may
2650 need to do it in a wider mode. */
2652 /* This is not really promoting for a call. However we need to be
2653 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2654 promoted_nominal_mode
2655 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2657 parmreg = gen_reg_rtx (promoted_nominal_mode);
2659 if (!DECL_ARTIFICIAL (parm))
2660 mark_user_reg (parmreg);
2662 /* If this was an item that we received a pointer to,
2663 set DECL_RTL appropriately. */
2664 if (data->passed_pointer)
2666 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2667 set_mem_attributes (x, parm, 1);
2668 SET_DECL_RTL (parm, x);
2670 else
2671 SET_DECL_RTL (parm, parmreg);
2673 /* Copy the value into the register. */
2674 if (data->nominal_mode != data->passed_mode
2675 || promoted_nominal_mode != data->promoted_mode)
2677 int save_tree_used;
2679 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2680 mode, by the caller. We now have to convert it to
2681 NOMINAL_MODE, if different. However, PARMREG may be in
2682 a different mode than NOMINAL_MODE if it is being stored
2683 promoted.
2685 If ENTRY_PARM is a hard register, it might be in a register
2686 not valid for operating in its mode (e.g., an odd-numbered
2687 register for a DFmode). In that case, moves are the only
2688 thing valid, so we can't do a convert from there. This
2689 occurs when the calling sequence allows such misaligned
2690 usage.
2692 In addition, the conversion may involve a call, which could
2693 clobber parameters which haven't been copied to pseudo
2694 registers yet. Therefore, we must first copy the parm to
2695 a pseudo reg here, and save the conversion until after all
2696 parameters have been moved. */
2698 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2700 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2702 push_to_sequence (all->conversion_insns);
2703 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2705 if (GET_CODE (tempreg) == SUBREG
2706 && GET_MODE (tempreg) == data->nominal_mode
2707 && REG_P (SUBREG_REG (tempreg))
2708 && data->nominal_mode == data->passed_mode
2709 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2710 && GET_MODE_SIZE (GET_MODE (tempreg))
2711 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2713 /* The argument is already sign/zero extended, so note it
2714 into the subreg. */
2715 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2716 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2719 /* TREE_USED gets set erroneously during expand_assignment. */
2720 save_tree_used = TREE_USED (parm);
2721 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2722 TREE_USED (parm) = save_tree_used;
2723 all->conversion_insns = get_insns ();
2724 end_sequence ();
2726 did_conversion = true;
2728 else
2729 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2731 /* If we were passed a pointer but the actual value can safely live
2732 in a register, put it in one. */
2733 if (data->passed_pointer
2734 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2735 /* If by-reference argument was promoted, demote it. */
2736 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2737 || use_register_for_decl (parm)))
2739 /* We can't use nominal_mode, because it will have been set to
2740 Pmode above. We must use the actual mode of the parm. */
2741 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2742 mark_user_reg (parmreg);
2744 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2746 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2747 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2749 push_to_sequence (all->conversion_insns);
2750 emit_move_insn (tempreg, DECL_RTL (parm));
2751 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2752 emit_move_insn (parmreg, tempreg);
2753 all->conversion_insns = get_insns ();
2754 end_sequence ();
2756 did_conversion = true;
2758 else
2759 emit_move_insn (parmreg, DECL_RTL (parm));
2761 SET_DECL_RTL (parm, parmreg);
2763 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2764 now the parm. */
2765 data->stack_parm = NULL;
2768 /* Mark the register as eliminable if we did no conversion and it was
2769 copied from memory at a fixed offset, and the arg pointer was not
2770 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2771 offset formed an invalid address, such memory-equivalences as we
2772 make here would screw up life analysis for it. */
2773 if (data->nominal_mode == data->passed_mode
2774 && !did_conversion
2775 && data->stack_parm != 0
2776 && MEM_P (data->stack_parm)
2777 && data->locate.offset.var == 0
2778 && reg_mentioned_p (virtual_incoming_args_rtx,
2779 XEXP (data->stack_parm, 0)))
2781 rtx linsn = get_last_insn ();
2782 rtx sinsn, set;
2784 /* Mark complex types separately. */
2785 if (GET_CODE (parmreg) == CONCAT)
2787 enum machine_mode submode
2788 = GET_MODE_INNER (GET_MODE (parmreg));
2789 int regnor = REGNO (XEXP (parmreg, 0));
2790 int regnoi = REGNO (XEXP (parmreg, 1));
2791 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2792 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2793 GET_MODE_SIZE (submode));
2795 /* Scan backwards for the set of the real and
2796 imaginary parts. */
2797 for (sinsn = linsn; sinsn != 0;
2798 sinsn = prev_nonnote_insn (sinsn))
2800 set = single_set (sinsn);
2801 if (set == 0)
2802 continue;
2804 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2805 REG_NOTES (sinsn)
2806 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2807 REG_NOTES (sinsn));
2808 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2809 REG_NOTES (sinsn)
2810 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2811 REG_NOTES (sinsn));
2814 else if ((set = single_set (linsn)) != 0
2815 && SET_DEST (set) == parmreg)
2816 REG_NOTES (linsn)
2817 = gen_rtx_EXPR_LIST (REG_EQUIV,
2818 data->stack_parm, REG_NOTES (linsn));
2821 /* For pointer data type, suggest pointer register. */
2822 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2823 mark_reg_pointer (parmreg,
2824 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2827 /* A subroutine of assign_parms. Allocate stack space to hold the current
2828 parameter. Get it there. Perform all ABI specified conversions. */
2830 static void
2831 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2832 struct assign_parm_data_one *data)
2834 /* Value must be stored in the stack slot STACK_PARM during function
2835 execution. */
2836 bool to_conversion = false;
2838 if (data->promoted_mode != data->nominal_mode)
2840 /* Conversion is required. */
2841 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2843 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2845 push_to_sequence (all->conversion_insns);
2846 to_conversion = true;
2848 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2849 TYPE_UNSIGNED (TREE_TYPE (parm)));
2851 if (data->stack_parm)
2852 /* ??? This may need a big-endian conversion on sparc64. */
2853 data->stack_parm
2854 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2857 if (data->entry_parm != data->stack_parm)
2859 rtx src, dest;
2861 if (data->stack_parm == 0)
2863 data->stack_parm
2864 = assign_stack_local (GET_MODE (data->entry_parm),
2865 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2866 TYPE_ALIGN (data->passed_type));
2867 set_mem_attributes (data->stack_parm, parm, 1);
2870 dest = validize_mem (data->stack_parm);
2871 src = validize_mem (data->entry_parm);
2873 if (MEM_P (src))
2875 /* Use a block move to handle potentially misaligned entry_parm. */
2876 if (!to_conversion)
2877 push_to_sequence (all->conversion_insns);
2878 to_conversion = true;
2880 emit_block_move (dest, src,
2881 GEN_INT (int_size_in_bytes (data->passed_type)),
2882 BLOCK_OP_NORMAL);
2884 else
2885 emit_move_insn (dest, src);
2888 if (to_conversion)
2890 all->conversion_insns = get_insns ();
2891 end_sequence ();
2894 SET_DECL_RTL (parm, data->stack_parm);
2897 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2898 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2900 static void
2901 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2903 tree parm;
2904 tree orig_fnargs = all->orig_fnargs;
2906 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2908 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2909 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2911 rtx tmp, real, imag;
2912 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2914 real = DECL_RTL (fnargs);
2915 imag = DECL_RTL (TREE_CHAIN (fnargs));
2916 if (inner != GET_MODE (real))
2918 real = gen_lowpart_SUBREG (inner, real);
2919 imag = gen_lowpart_SUBREG (inner, imag);
2922 if (TREE_ADDRESSABLE (parm))
2924 rtx rmem, imem;
2925 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2927 /* split_complex_arg put the real and imag parts in
2928 pseudos. Move them to memory. */
2929 tmp = assign_stack_local (DECL_MODE (parm), size,
2930 TYPE_ALIGN (TREE_TYPE (parm)));
2931 set_mem_attributes (tmp, parm, 1);
2932 rmem = adjust_address_nv (tmp, inner, 0);
2933 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2934 push_to_sequence (all->conversion_insns);
2935 emit_move_insn (rmem, real);
2936 emit_move_insn (imem, imag);
2937 all->conversion_insns = get_insns ();
2938 end_sequence ();
2940 else
2941 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2942 SET_DECL_RTL (parm, tmp);
2944 real = DECL_INCOMING_RTL (fnargs);
2945 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2946 if (inner != GET_MODE (real))
2948 real = gen_lowpart_SUBREG (inner, real);
2949 imag = gen_lowpart_SUBREG (inner, imag);
2951 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2952 set_decl_incoming_rtl (parm, tmp);
2953 fnargs = TREE_CHAIN (fnargs);
2955 else
2957 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2958 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2960 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2961 instead of the copy of decl, i.e. FNARGS. */
2962 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2963 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2966 fnargs = TREE_CHAIN (fnargs);
2970 /* Assign RTL expressions to the function's parameters. This may involve
2971 copying them into registers and using those registers as the DECL_RTL. */
2973 static void
2974 assign_parms (tree fndecl)
2976 struct assign_parm_data_all all;
2977 tree fnargs, parm;
2979 current_function_internal_arg_pointer
2980 = targetm.calls.internal_arg_pointer ();
2982 assign_parms_initialize_all (&all);
2983 fnargs = assign_parms_augmented_arg_list (&all);
2985 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2987 struct assign_parm_data_one data;
2989 /* Extract the type of PARM; adjust it according to ABI. */
2990 assign_parm_find_data_types (&all, parm, &data);
2992 /* Early out for errors and void parameters. */
2993 if (data.passed_mode == VOIDmode)
2995 SET_DECL_RTL (parm, const0_rtx);
2996 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2997 continue;
3000 if (current_function_stdarg && !TREE_CHAIN (parm))
3001 assign_parms_setup_varargs (&all, &data, false);
3003 /* Find out where the parameter arrives in this function. */
3004 assign_parm_find_entry_rtl (&all, &data);
3006 /* Find out where stack space for this parameter might be. */
3007 if (assign_parm_is_stack_parm (&all, &data))
3009 assign_parm_find_stack_rtl (parm, &data);
3010 assign_parm_adjust_entry_rtl (&data);
3013 /* Record permanently how this parm was passed. */
3014 set_decl_incoming_rtl (parm, data.entry_parm);
3016 /* Update info on where next arg arrives in registers. */
3017 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3018 data.passed_type, data.named_arg);
3020 assign_parm_adjust_stack_rtl (&data);
3022 if (assign_parm_setup_block_p (&data))
3023 assign_parm_setup_block (&all, parm, &data);
3024 else if (data.passed_pointer || use_register_for_decl (parm))
3025 assign_parm_setup_reg (&all, parm, &data);
3026 else
3027 assign_parm_setup_stack (&all, parm, &data);
3030 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3031 assign_parms_unsplit_complex (&all, fnargs);
3033 /* Output all parameter conversion instructions (possibly including calls)
3034 now that all parameters have been copied out of hard registers. */
3035 emit_insn (all.conversion_insns);
3037 /* If we are receiving a struct value address as the first argument, set up
3038 the RTL for the function result. As this might require code to convert
3039 the transmitted address to Pmode, we do this here to ensure that possible
3040 preliminary conversions of the address have been emitted already. */
3041 if (all.function_result_decl)
3043 tree result = DECL_RESULT (current_function_decl);
3044 rtx addr = DECL_RTL (all.function_result_decl);
3045 rtx x;
3047 if (DECL_BY_REFERENCE (result))
3048 x = addr;
3049 else
3051 addr = convert_memory_address (Pmode, addr);
3052 x = gen_rtx_MEM (DECL_MODE (result), addr);
3053 set_mem_attributes (x, result, 1);
3055 SET_DECL_RTL (result, x);
3058 /* We have aligned all the args, so add space for the pretend args. */
3059 current_function_pretend_args_size = all.pretend_args_size;
3060 all.stack_args_size.constant += all.extra_pretend_bytes;
3061 current_function_args_size = all.stack_args_size.constant;
3063 /* Adjust function incoming argument size for alignment and
3064 minimum length. */
3066 #ifdef REG_PARM_STACK_SPACE
3067 current_function_args_size = MAX (current_function_args_size,
3068 REG_PARM_STACK_SPACE (fndecl));
3069 #endif
3071 current_function_args_size = CEIL_ROUND (current_function_args_size,
3072 PARM_BOUNDARY / BITS_PER_UNIT);
3074 #ifdef ARGS_GROW_DOWNWARD
3075 current_function_arg_offset_rtx
3076 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3077 : expand_expr (size_diffop (all.stack_args_size.var,
3078 size_int (-all.stack_args_size.constant)),
3079 NULL_RTX, VOIDmode, 0));
3080 #else
3081 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3082 #endif
3084 /* See how many bytes, if any, of its args a function should try to pop
3085 on return. */
3087 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3088 current_function_args_size);
3090 /* For a stdarg.h function, save info about
3091 regs and stack space used by the named args. */
3093 current_function_args_info = all.args_so_far;
3095 /* Set the rtx used for the function return value. Put this in its
3096 own variable so any optimizers that need this information don't have
3097 to include tree.h. Do this here so it gets done when an inlined
3098 function gets output. */
3100 current_function_return_rtx
3101 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3102 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3104 /* If scalar return value was computed in a pseudo-reg, or was a named
3105 return value that got dumped to the stack, copy that to the hard
3106 return register. */
3107 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3109 tree decl_result = DECL_RESULT (fndecl);
3110 rtx decl_rtl = DECL_RTL (decl_result);
3112 if (REG_P (decl_rtl)
3113 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3114 : DECL_REGISTER (decl_result))
3116 rtx real_decl_rtl;
3118 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3119 fndecl, true);
3120 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3121 /* The delay slot scheduler assumes that current_function_return_rtx
3122 holds the hard register containing the return value, not a
3123 temporary pseudo. */
3124 current_function_return_rtx = real_decl_rtl;
3129 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3130 For all seen types, gimplify their sizes. */
3132 static tree
3133 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3135 tree t = *tp;
3137 *walk_subtrees = 0;
3138 if (TYPE_P (t))
3140 if (POINTER_TYPE_P (t))
3141 *walk_subtrees = 1;
3142 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3143 && !TYPE_SIZES_GIMPLIFIED (t))
3145 gimplify_type_sizes (t, (tree *) data);
3146 *walk_subtrees = 1;
3150 return NULL;
3153 /* Gimplify the parameter list for current_function_decl. This involves
3154 evaluating SAVE_EXPRs of variable sized parameters and generating code
3155 to implement callee-copies reference parameters. Returns a list of
3156 statements to add to the beginning of the function, or NULL if nothing
3157 to do. */
3159 tree
3160 gimplify_parameters (void)
3162 struct assign_parm_data_all all;
3163 tree fnargs, parm, stmts = NULL;
3165 assign_parms_initialize_all (&all);
3166 fnargs = assign_parms_augmented_arg_list (&all);
3168 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3170 struct assign_parm_data_one data;
3172 /* Extract the type of PARM; adjust it according to ABI. */
3173 assign_parm_find_data_types (&all, parm, &data);
3175 /* Early out for errors and void parameters. */
3176 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3177 continue;
3179 /* Update info on where next arg arrives in registers. */
3180 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3181 data.passed_type, data.named_arg);
3183 /* ??? Once upon a time variable_size stuffed parameter list
3184 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3185 turned out to be less than manageable in the gimple world.
3186 Now we have to hunt them down ourselves. */
3187 walk_tree_without_duplicates (&data.passed_type,
3188 gimplify_parm_type, &stmts);
3190 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3192 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3193 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3196 if (data.passed_pointer)
3198 tree type = TREE_TYPE (data.passed_type);
3199 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3200 type, data.named_arg))
3202 tree local, t;
3204 /* For constant sized objects, this is trivial; for
3205 variable-sized objects, we have to play games. */
3206 if (TREE_CONSTANT (DECL_SIZE (parm)))
3208 local = create_tmp_var (type, get_name (parm));
3209 DECL_IGNORED_P (local) = 0;
3211 else
3213 tree ptr_type, addr, args;
3215 ptr_type = build_pointer_type (type);
3216 addr = create_tmp_var (ptr_type, get_name (parm));
3217 DECL_IGNORED_P (addr) = 0;
3218 local = build_fold_indirect_ref (addr);
3220 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3221 t = built_in_decls[BUILT_IN_ALLOCA];
3222 t = build_function_call_expr (t, args);
3223 t = fold_convert (ptr_type, t);
3224 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3225 gimplify_and_add (t, &stmts);
3228 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3229 gimplify_and_add (t, &stmts);
3231 SET_DECL_VALUE_EXPR (parm, local);
3232 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3237 return stmts;
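/* Illustration (informal): for a callee-copied reference parameter of
   variable size, the statements built above amount roughly to
   'addr = __builtin_alloca (size); *addr = parm;', after which uses of
   PARM are redirected to the local copy via its DECL_VALUE_EXPR.  */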
3240 /* Indicate whether REGNO is an incoming argument to the current function
3241 that was promoted to a wider mode. If so, return the RTX for the
3242 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3243 that REGNO is promoted from and whether the promotion was signed or
3244 unsigned. */
3246 rtx
3247 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3249 tree arg;
3251 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3252 arg = TREE_CHAIN (arg))
3253 if (REG_P (DECL_INCOMING_RTL (arg))
3254 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3255 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3257 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3258 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3260 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3261 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3262 && mode != DECL_MODE (arg))
3264 *pmode = DECL_MODE (arg);
3265 *punsignedp = unsignedp;
3266 return DECL_INCOMING_RTL (arg);
3270 return 0;
3274 /* Compute the size and offset from the start of the stacked arguments for a
3275 parm passed in mode PASSED_MODE and with type TYPE.
3277 INITIAL_OFFSET_PTR points to the current offset into the stacked
3278 arguments.
3280 The starting offset and size for this parm are returned in
3281 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3282 nonzero, the offset is that of the stack slot, which is returned in
3283 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3284 padding required from the initial offset ptr to the stack slot.
3286 IN_REGS is nonzero if the argument will be passed in registers. It will
3287 never be set if REG_PARM_STACK_SPACE is not defined.
3289 FNDECL is the function in which the argument was defined.
3291 There are two types of rounding that are done. The first, controlled by
3292 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3293 list to be aligned to the specific boundary (in bits). This rounding
3294 affects the initial and starting offsets, but not the argument size.
3296 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3297 optionally rounds the size of the parm to PARM_BOUNDARY. The
3298 initial offset is not affected by this rounding, while the size always
3299 is and the starting offset may be. */
3301 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3302 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3303 callers pass in the total size of args so far as
3304 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
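/* Worked example (args grow upward, PARM_BOUNDARY == 32 assumed): a
   6-byte BLKmode argument has LOCATE->SIZE rounded up to 8 bytes,
   LOCATE->SLOT_OFFSET is the incoming offset rounded up to the
   argument's FUNCTION_ARG_BOUNDARY, and with upward padding
   LOCATE->OFFSET equals LOCATE->SLOT_OFFSET.  */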
3306 void
3307 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3308 int partial, tree fndecl ATTRIBUTE_UNUSED,
3309 struct args_size *initial_offset_ptr,
3310 struct locate_and_pad_arg_data *locate)
3312 tree sizetree;
3313 enum direction where_pad;
3314 unsigned int boundary;
3315 int reg_parm_stack_space = 0;
3316 int part_size_in_regs;
3318 #ifdef REG_PARM_STACK_SPACE
3319 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3321 /* If we have found a stack parm before we reach the end of the
3322 area reserved for registers, skip that area. */
3323 if (! in_regs)
3325 if (reg_parm_stack_space > 0)
3327 if (initial_offset_ptr->var)
3329 initial_offset_ptr->var
3330 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3331 ssize_int (reg_parm_stack_space));
3332 initial_offset_ptr->constant = 0;
3334 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3335 initial_offset_ptr->constant = reg_parm_stack_space;
3338 #endif /* REG_PARM_STACK_SPACE */
3340 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3342 sizetree
3343 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3344 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3345 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3346 locate->where_pad = where_pad;
3347 locate->boundary = boundary;
3349 /* Remember if the outgoing parameter requires extra alignment on the
3350 calling function side. */
3351 if (boundary > PREFERRED_STACK_BOUNDARY)
3352 boundary = PREFERRED_STACK_BOUNDARY;
3353 if (cfun->stack_alignment_needed < boundary)
3354 cfun->stack_alignment_needed = boundary;
3356 #ifdef ARGS_GROW_DOWNWARD
3357 locate->slot_offset.constant = -initial_offset_ptr->constant;
3358 if (initial_offset_ptr->var)
3359 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3360 initial_offset_ptr->var);
3363 tree s2 = sizetree;
3364 if (where_pad != none
3365 && (!host_integerp (sizetree, 1)
3366 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3367 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3368 SUB_PARM_SIZE (locate->slot_offset, s2);
3371 locate->slot_offset.constant += part_size_in_regs;
3373 if (!in_regs
3374 #ifdef REG_PARM_STACK_SPACE
3375 || REG_PARM_STACK_SPACE (fndecl) > 0
3376 #endif
3378 pad_to_arg_alignment (&locate->slot_offset, boundary,
3379 &locate->alignment_pad);
3381 locate->size.constant = (-initial_offset_ptr->constant
3382 - locate->slot_offset.constant);
3383 if (initial_offset_ptr->var)
3384 locate->size.var = size_binop (MINUS_EXPR,
3385 size_binop (MINUS_EXPR,
3386 ssize_int (0),
3387 initial_offset_ptr->var),
3388 locate->slot_offset.var);
3390 /* Pad_below needs the pre-rounded size to know how much to pad
3391 below. */
3392 locate->offset = locate->slot_offset;
3393 if (where_pad == downward)
3394 pad_below (&locate->offset, passed_mode, sizetree);
3396 #else /* !ARGS_GROW_DOWNWARD */
3397 if (!in_regs
3398 #ifdef REG_PARM_STACK_SPACE
3399 || REG_PARM_STACK_SPACE (fndecl) > 0
3400 #endif
3402 pad_to_arg_alignment (initial_offset_ptr, boundary,
3403 &locate->alignment_pad);
3404 locate->slot_offset = *initial_offset_ptr;
3406 #ifdef PUSH_ROUNDING
3407 if (passed_mode != BLKmode)
3408 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3409 #endif
3411 /* Pad_below needs the pre-rounded size to know how much to pad below
3412 so this must be done before rounding up. */
3413 locate->offset = locate->slot_offset;
3414 if (where_pad == downward)
3415 pad_below (&locate->offset, passed_mode, sizetree);
3417 if (where_pad != none
3418 && (!host_integerp (sizetree, 1)
3419 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3420 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3422 ADD_PARM_SIZE (locate->size, sizetree);
3424 locate->size.constant -= part_size_in_regs;
3425 #endif /* ARGS_GROW_DOWNWARD */
3428 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3429 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3431 static void
3432 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3433 struct args_size *alignment_pad)
3435 tree save_var = NULL_TREE;
3436 HOST_WIDE_INT save_constant = 0;
3437 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3438 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3440 #ifdef SPARC_STACK_BOUNDARY_HACK
3441 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3442 the real alignment of %sp. However, when it does this, the
3443 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3444 if (SPARC_STACK_BOUNDARY_HACK)
3445 sp_offset = 0;
3446 #endif
3448 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3450 save_var = offset_ptr->var;
3451 save_constant = offset_ptr->constant;
3454 alignment_pad->var = NULL_TREE;
3455 alignment_pad->constant = 0;
3457 if (boundary > BITS_PER_UNIT)
3459 if (offset_ptr->var)
3461 tree sp_offset_tree = ssize_int (sp_offset);
3462 tree offset = size_binop (PLUS_EXPR,
3463 ARGS_SIZE_TREE (*offset_ptr),
3464 sp_offset_tree);
3465 #ifdef ARGS_GROW_DOWNWARD
3466 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3467 #else
3468 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3469 #endif
3471 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3472 /* ARGS_SIZE_TREE includes constant term. */
3473 offset_ptr->constant = 0;
3474 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3475 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3476 save_var);
3478 else
3480 offset_ptr->constant = -sp_offset +
3481 #ifdef ARGS_GROW_DOWNWARD
3482 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3483 #else
3484 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3485 #endif
3486 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3487 alignment_pad->constant = offset_ptr->constant - save_constant;
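/* Worked example (values assumed, args grow upward): with a 128-bit
   boundary, a current offset of 20 and STACK_POINTER_OFFSET == 8, the
   constant becomes -8 + CEIL_ROUND (28, 16) == 24, so that
   sp_offset + 24 == 32 is indeed 16-byte aligned.  */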
3492 static void
3493 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3495 if (passed_mode != BLKmode)
3497 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3498 offset_ptr->constant
3499 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3500 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3501 - GET_MODE_SIZE (passed_mode));
3503 else
3505 if (TREE_CODE (sizetree) != INTEGER_CST
3506 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3508 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3509 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3510 /* Add it in. */
3511 ADD_PARM_SIZE (*offset_ptr, s2);
3512 SUB_PARM_SIZE (*offset_ptr, sizetree);
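/* Worked example (PARM_BOUNDARY == 64 assumed): an SFmode argument
   occupies 4 bytes but its slot is rounded to 8, so the offset is
   advanced by the 4 bytes of padding that sit below the value.  */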
3517 /* Walk the tree of blocks describing the binding levels within a function
3518 and warn about variables that might be killed by setjmp or vfork.
3519 This is done after calling flow_analysis and before global_alloc
3520 clobbers the pseudo-regs to hard regs. */
3522 void
3523 setjmp_vars_warning (tree block)
3525 tree decl, sub;
3527 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3529 if (TREE_CODE (decl) == VAR_DECL
3530 && DECL_RTL_SET_P (decl)
3531 && REG_P (DECL_RTL (decl))
3532 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3533 warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3534 " or %<vfork%>",
3535 decl);
3538 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3539 setjmp_vars_warning (sub);
3542 /* Do the appropriate part of setjmp_vars_warning
3543 but for arguments instead of local variables. */
3545 void
3546 setjmp_args_warning (void)
3548 tree decl;
3549 for (decl = DECL_ARGUMENTS (current_function_decl);
3550 decl; decl = TREE_CHAIN (decl))
3551 if (DECL_RTL (decl) != 0
3552 && REG_P (DECL_RTL (decl))
3553 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3554 warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3555 decl);
3559 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3560 and create duplicate blocks. */
3561 /* ??? Need an option to either create block fragments or to create
3562 abstract origin duplicates of a source block. It really depends
3563 on what optimization has been performed. */
3565 void
3566 reorder_blocks (void)
3568 tree block = DECL_INITIAL (current_function_decl);
3569 VEC(tree,heap) *block_stack;
3571 if (block == NULL_TREE)
3572 return;
3574 block_stack = VEC_alloc (tree, heap, 10);
3576 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3577 clear_block_marks (block);
3579 /* Prune the old trees away, so that they don't get in the way. */
3580 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3581 BLOCK_CHAIN (block) = NULL_TREE;
3583 /* Recreate the block tree from the note nesting. */
3584 reorder_blocks_1 (get_insns (), block, &block_stack);
3585 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3587 /* Remove deleted blocks from the block fragment chains. */
3588 reorder_fix_fragments (block);
3590 VEC_free (tree, heap, block_stack);
3593 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3595 void
3596 clear_block_marks (tree block)
3598 while (block)
3600 TREE_ASM_WRITTEN (block) = 0;
3601 clear_block_marks (BLOCK_SUBBLOCKS (block));
3602 block = BLOCK_CHAIN (block);
3606 static void
3607 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3609 rtx insn;
3611 for (insn = insns; insn; insn = NEXT_INSN (insn))
3613 if (NOTE_P (insn))
3615 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3617 tree block = NOTE_BLOCK (insn);
3619 /* If we have seen this block before, that means it now
3620 spans multiple address regions. Create a new fragment. */
3621 if (TREE_ASM_WRITTEN (block))
3623 tree new_block = copy_node (block);
3624 tree origin;
3626 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3627 ? BLOCK_FRAGMENT_ORIGIN (block)
3628 : block);
3629 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3630 BLOCK_FRAGMENT_CHAIN (new_block)
3631 = BLOCK_FRAGMENT_CHAIN (origin);
3632 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3634 NOTE_BLOCK (insn) = new_block;
3635 block = new_block;
3638 BLOCK_SUBBLOCKS (block) = 0;
3639 TREE_ASM_WRITTEN (block) = 1;
3640 /* When there's only one block for the entire function,
3641 current_block == block and we mustn't do this; it
3642 will cause infinite recursion. */
3643 if (block != current_block)
3645 BLOCK_SUPERCONTEXT (block) = current_block;
3646 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3647 BLOCK_SUBBLOCKS (current_block) = block;
3648 current_block = block;
3650 VEC_safe_push (tree, heap, *p_block_stack, block);
3652 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3654 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3655 BLOCK_SUBBLOCKS (current_block)
3656 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3657 current_block = BLOCK_SUPERCONTEXT (current_block);
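/* Illustration (informal): if optimization has duplicated a block's
   insns so that two NOTE_INSN_BLOCK_BEG notes name the same BLOCK, the
   second occurrence gets a fresh fragment above, chained to the origin
   via BLOCK_FRAGMENT_CHAIN, and each fragment later describes its own
   address range in the debug output.  */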
3663 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3664 appears in the block tree, select one of the fragments to become
3665 the new origin block. */
3667 static void
3668 reorder_fix_fragments (tree block)
3670 while (block)
3672 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3673 tree new_origin = NULL_TREE;
3675 if (dup_origin)
3677 if (! TREE_ASM_WRITTEN (dup_origin))
3679 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3681 /* Find the first of the remaining fragments. There must
3682 be at least one -- the current block. */
3683 while (! TREE_ASM_WRITTEN (new_origin))
3684 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3685 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3688 else if (! dup_origin)
3689 new_origin = block;
3691 /* Re-root the rest of the fragments to the new origin. In the
3692 case that DUP_ORIGIN was null, that means BLOCK was the origin
3693 of a chain of fragments and we want to remove those fragments
3694 that didn't make it to the output. */
3695 if (new_origin)
3697 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3698 tree chain = *pp;
3700 while (chain)
3702 if (TREE_ASM_WRITTEN (chain))
3704 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3705 *pp = chain;
3706 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3708 chain = BLOCK_FRAGMENT_CHAIN (chain);
3710 *pp = NULL_TREE;
3713 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3714 block = BLOCK_CHAIN (block);
3718 /* Reverse the order of elements in the chain T of blocks,
3719 and return the new head of the chain (old last element). */
3721 tree
3722 blocks_nreverse (tree t)
3724 tree prev = 0, decl, next;
3725 for (decl = t; decl; decl = next)
3727 next = BLOCK_CHAIN (decl);
3728 BLOCK_CHAIN (decl) = prev;
3729 prev = decl;
3731 return prev;
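/* blocks_nreverse above is the standard in-place reversal of a singly
   linked list, applied to the BLOCK_CHAIN links.  The same pattern on a
   hypothetical node type, for illustration:  */
#include <stdio.h>

struct node { int id; struct node *next; };

static struct node *
nreverse (struct node *t)
{
  struct node *prev = NULL, *cur, *next;
  for (cur = t; cur; cur = next)
    {
      next = cur->next;     /* save the rest of the chain */
      cur->next = prev;     /* point this node backwards */
      prev = cur;           /* advance the new head */
    }
  return prev;              /* old last element is the new head */
}

int
main (void)
{
  struct node c = { 3, NULL }, b = { 2, &c }, a = { 1, &b };
  struct node *p;

  for (p = nreverse (&a); p; p = p->next)
    printf ("%d ", p->id);  /* prints: 3 2 1 */
  putchar ('\n');
  return 0;
}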
3734 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3735 non-NULL, list them all into VECTOR, in a depth-first preorder
3736 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3737 blocks. */
3739 static int
3740 all_blocks (tree block, tree *vector)
3742 int n_blocks = 0;
3744 while (block)
3746 TREE_ASM_WRITTEN (block) = 0;
3748 /* Record this block. */
3749 if (vector)
3750 vector[n_blocks] = block;
3752 ++n_blocks;
3754 /* Record the subblocks, and their subblocks... */
3755 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3756 vector ? vector + n_blocks : 0);
3757 block = BLOCK_CHAIN (block);
3760 return n_blocks;
3763 /* Return a vector containing all the blocks rooted at BLOCK. The
3764 number of elements in the vector is stored in N_BLOCKS_P. The
3765 vector is dynamically allocated; it is the caller's responsibility
3766 to call `free' on the pointer returned. */
3768 static tree *
3769 get_block_vector (tree block, int *n_blocks_p)
3771 tree *block_vector;
3773 *n_blocks_p = all_blocks (block, NULL);
3774 block_vector = XNEWVEC (tree, *n_blocks_p);
3775 all_blocks (block, block_vector);
3777 return block_vector;
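/* get_block_vector uses the classic two-pass idiom: call the walker
   once with a null vector to learn the size, allocate, then call it
   again to fill.  A self-contained sketch of the same shape on a toy
   tree type (not GCC's BLOCK):  */
#include <stdlib.h>

struct blk { struct blk *sub, *chain; };

/* Depth-first preorder walk; records blocks into VEC when non-null and
   always returns the count, like all_blocks.  */
static int
walk (struct blk *block, struct blk **vec)
{
  int n = 0;
  while (block)
    {
      if (vec)
        vec[n] = block;
      ++n;
      n += walk (block->sub, vec ? vec + n : NULL);
      block = block->chain;
    }
  return n;
}

static struct blk **
block_vector_of (struct blk *root, int *n_p)
{
  struct blk **v;

  *n_p = walk (root, NULL);             /* pass 1: count */
  v = malloc (*n_p * sizeof *v);
  walk (root, v);                       /* pass 2: fill */
  return v;                             /* caller must free */
}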
3780 static GTY(()) int next_block_index = 2;
3782 /* Set BLOCK_NUMBER for all the blocks in FN. */
3784 void
3785 number_blocks (tree fn)
3787 int i;
3788 int n_blocks;
3789 tree *block_vector;
3791 /* For SDB and XCOFF debugging output, we start numbering the blocks
3792 from 1 within each function, rather than keeping a running
3793 count. */
3794 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3795 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3796 next_block_index = 1;
3797 #endif
3799 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3801 /* The top-level BLOCK isn't numbered at all. */
3802 for (i = 1; i < n_blocks; ++i)
3803 /* Blocks are numbered from NEXT_BLOCK_INDEX (normally two). */
3804 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3806 free (block_vector);
3808 return;
3811 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3813 tree
3814 debug_find_var_in_block_tree (tree var, tree block)
3816 tree t;
3818 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3819 if (t == var)
3820 return block;
3822 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3824 tree ret = debug_find_var_in_block_tree (var, t);
3825 if (ret)
3826 return ret;
3829 return NULL_TREE;
3832 /* Allocate a function structure for FNDECL and set its contents
3833 to the defaults. */
3835 void
3836 allocate_struct_function (tree fndecl)
3838 tree result;
3839 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3841 cfun = ggc_alloc_cleared (sizeof (struct function));
3843 cfun->stack_alignment_needed = STACK_BOUNDARY;
3844 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3846 current_function_funcdef_no = funcdef_no++;
3848 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3850 init_eh_for_function ();
3852 lang_hooks.function.init (cfun);
3853 if (init_machine_status)
3854 cfun->machine = (*init_machine_status) ();
3856 if (fndecl == NULL)
3857 return;
3859 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3860 cfun->decl = fndecl;
3862 result = DECL_RESULT (fndecl);
3863 if (aggregate_value_p (result, fndecl))
3865 #ifdef PCC_STATIC_STRUCT_RETURN
3866 current_function_returns_pcc_struct = 1;
3867 #endif
3868 current_function_returns_struct = 1;
3871 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3873 current_function_stdarg
3874 = (fntype
3875 && TYPE_ARG_TYPES (fntype) != 0
3876 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3877 != void_type_node));
3879 /* Assume all registers in stdarg functions need to be saved. */
3880 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3881 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
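/* The current_function_stdarg computation above relies on how C
   prototypes are encoded: a fixed argument list is terminated by
   void_type_node, a varargs list is not, and an unprototyped function
   has no TYPE_ARG_TYPES at all.  In source terms:  */
int fixed (int a, double b);   /* arg-type list ends in void: not stdarg */
int variadic (int a, ...);     /* list does not end in void: stdarg */
int unprototyped ();           /* TYPE_ARG_TYPES is null: not stdarg */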
3884 /* Reset cfun, and other non-struct-function variables to defaults as
3885 appropriate for emitting rtl at the start of a function. */
3887 static void
3888 prepare_function_start (tree fndecl)
3890 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3891 cfun = DECL_STRUCT_FUNCTION (fndecl);
3892 else
3893 allocate_struct_function (fndecl);
3894 init_emit ();
3895 init_varasm_status (cfun);
3896 init_expr ();
3898 cse_not_expected = ! optimize;
3900 /* Caller save not needed yet. */
3901 caller_save_needed = 0;
3903 /* We haven't done register allocation yet. */
3904 reg_renumber = 0;
3906 /* Indicate that we have not instantiated virtual registers yet. */
3907 virtuals_instantiated = 0;
3909 /* Indicate that we want CONCATs now. */
3910 generating_concat_p = 1;
3912 /* Indicate we have no need of a frame pointer yet. */
3913 frame_pointer_needed = 0;
3916 /* Initialize the rtl expansion mechanism so that we can do simple things
3917 like generate sequences. This is used to provide a context during global
3918 initialization of some passes. */
3919 void
3920 init_dummy_function_start (void)
3922 prepare_function_start (NULL);
3925 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3926 and initialize static variables for generating RTL for the statements
3927 of the function. */
3929 void
3930 init_function_start (tree subr)
3932 prepare_function_start (subr);
3934 /* Prevent ever trying to delete the first instruction of a
3935 function. Also tell final how to output a linenum before the
3936 function prologue. Note linenums could be missing, e.g. when
3937 compiling a Java .class file. */
3938 if (! DECL_IS_BUILTIN (subr))
3939 emit_line_note (DECL_SOURCE_LOCATION (subr));
3941 /* Make sure first insn is a note even if we don't want linenums.
3942 This makes sure the first insn will never be deleted.
3943 Also, final expects a note to appear there. */
3944 emit_note (NOTE_INSN_DELETED);
3946 /* Warn if the return value is an aggregate type,
3947 regardless of which calling convention we are using for it. */
3948 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3949 warning (OPT_Waggregate_return, "function returns an aggregate");
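/* An example of what the warning above fires on when
   -Waggregate-return is enabled:  */
struct point { int x, y; };

struct point
make_point (void)          /* returns an aggregate: warned */
{
  struct point p = { 0, 0 };
  return p;
}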
3952 /* Make sure all values used by the optimization passes have sane
3953 defaults. */
3954 unsigned int
3955 init_function_for_compilation (void)
3957 reg_renumber = 0;
3959 /* No prologue/epilogue insns yet. Make sure that these vectors are
3960 empty. */
3961 gcc_assert (VEC_length (int, prologue) == 0);
3962 gcc_assert (VEC_length (int, epilogue) == 0);
3963 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3964 return 0;
3967 struct tree_opt_pass pass_init_function =
3969 NULL, /* name */
3970 NULL, /* gate */
3971 init_function_for_compilation, /* execute */
3972 NULL, /* sub */
3973 NULL, /* next */
3974 0, /* static_pass_number */
3975 0, /* tv_id */
3976 0, /* properties_required */
3977 0, /* properties_provided */
3978 0, /* properties_destroyed */
3979 0, /* todo_flags_start */
3980 0, /* todo_flags_finish */
3981 0 /* letter */
3985 void
3986 expand_main_function (void)
3988 #if (defined(INVOKE__main) \
3989 || (!defined(HAS_INIT_SECTION) \
3990 && !defined(INIT_SECTION_ASM_OP) \
3991 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3992 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3993 #endif
3996 /* Expand code to initialize the stack_protect_guard. This is invoked at
3997 the beginning of a function to be protected. */
3999 #ifndef HAVE_stack_protect_set
4000 # define HAVE_stack_protect_set 0
4001 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4002 #endif
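/* The #ifndef block above is the usual idiom for optional named insn
   patterns: if the target's .md file does not provide
   stack_protect_set, HAVE_stack_protect_set becomes the constant 0 so
   the test below folds away, while the gen_* stub keeps the call
   type-correct and gcc_unreachable guards the dead branch.  A generic
   sketch of the same HAVE_xxx/gen_xxx idiom with a hypothetical
   feature name:  */
#include <stdlib.h>

#ifndef HAVE_frob                 /* hypothetical optional pattern */
# define HAVE_frob 0
# define gen_frob(x) (abort (), (void *) 0)   /* type-correct, never run */
#endif

static void *
maybe_frob (void *operand)
{
  if (HAVE_frob)                  /* constant 0: branch is eliminated */
    return gen_frob (operand);
  return operand;                 /* generic fallback path */
}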
4004 void
4005 stack_protect_prologue (void)
4007 tree guard_decl = targetm.stack_protect_guard ();
4008 rtx x, y;
4010 /* Avoid expand_expr here, because we don't want guard_decl pulled
4011 into registers unless absolutely necessary. And we know that
4012 cfun->stack_protect_guard is a local stack slot, so this skips
4013 all the fluff. */
4014 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4015 y = validize_mem (DECL_RTL (guard_decl));
4017 /* Allow the target to copy from Y to X without leaking Y into a
4018 register. */
4019 if (HAVE_stack_protect_set)
4021 rtx insn = gen_stack_protect_set (x, y);
4022 if (insn)
4024 emit_insn (insn);
4025 return;
4029 /* Otherwise do a straight move. */
4030 emit_move_insn (x, y);
4033 /* Expand code to verify the stack_protect_guard. This is invoked at
4034 the end of a function to be protected. */
4036 #ifndef HAVE_stack_protect_test
4037 # define HAVE_stack_protect_test 0
4038 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4039 #endif
4041 void
4042 stack_protect_epilogue (void)
4044 tree guard_decl = targetm.stack_protect_guard ();
4045 rtx label = gen_label_rtx ();
4046 rtx x, y, tmp;
4048 /* Avoid expand_expr here, because we don't want guard_decl pulled
4049 into registers unless absolutely necessary. And we know that
4050 cfun->stack_protect_guard is a local stack slot, so this skips
4051 all the fluff. */
4052 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4053 y = validize_mem (DECL_RTL (guard_decl));
4055 /* Allow the target to compare Y with X without leaking either into
4056 a register. */
4057 switch (HAVE_stack_protect_test != 0)
4059 case 1:
4060 tmp = gen_stack_protect_test (x, y, label);
4061 if (tmp)
4063 emit_insn (tmp);
4064 break;
4066 /* FALLTHRU */
4068 default:
4069 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4070 break;
4073 /* The noreturn predictor has been moved to the tree level. The rtl-level
4074 predictors estimate this branch about 20%, which isn't enough to get
4075 things moved out of line. Since this is the only extant case of adding
4076 a noreturn function at the rtl level, it doesn't seem worth doing aught
4077 except adding the prediction by hand. */
4078 tmp = get_last_insn ();
4079 if (JUMP_P (tmp))
4080 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4082 expand_expr_stmt (targetm.stack_protect_fail ());
4083 emit_label (label);
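/* Taken together, stack_protect_prologue and stack_protect_epilogue
   emit code whose runtime behavior is roughly the following
   hand-written C (the __guard symbol and failure-hook names here are
   illustrative, not the actual target-specific choices):  */
extern unsigned long __guard;                /* hypothetical guard variable */
extern void stack_smashing_detected (void);  /* stand-in for the fail hook */

void
protected_function (void)
{
  unsigned long canary = __guard;    /* prologue: copy guard into the frame */

  /* ...function body; an overrun of a local buffer would overwrite
     CANARY before reaching the saved registers and return address...  */

  if (canary != __guard)             /* epilogue: compare, fail on change */
    stack_smashing_detected ();
}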
4086 /* Start the RTL for a new function, and set variables used for
4087 emitting RTL.
4088 SUBR is the FUNCTION_DECL node. */
4092 void
4093 expand_function_start (tree subr)
4095 /* Make sure volatile mem refs aren't considered
4096 valid operands of arithmetic insns. */
4097 init_recog_no_volatile ();
4099 current_function_profile
4100 = (profile_flag
4101 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4103 current_function_limit_stack
4104 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4106 /* Make the label for return statements to jump to. Do not special
4107 case machines with special return instructions -- they will be
4108 handled later during jump, ifcvt, or epilogue creation. */
4109 return_label = gen_label_rtx ();
4111 /* Initialize rtx used to return the value. */
4112 /* Do this before assign_parms so that we copy the struct value address
4113 before any library calls that assign parms might generate. */
4115 /* Decide whether to return the value in memory or in a register. */
4116 if (aggregate_value_p (DECL_RESULT (subr), subr))
4118 /* Returning something that won't go in a register. */
4119 rtx value_address = 0;
4121 #ifdef PCC_STATIC_STRUCT_RETURN
4122 if (current_function_returns_pcc_struct)
4124 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4125 value_address = assemble_static_space (size);
4127 else
4128 #endif
4130 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4131 /* Expect to be passed the address of a place to store the value.
4132 If it is passed as an argument, assign_parms will take care of
4133 it. */
4134 if (sv)
4136 value_address = gen_reg_rtx (Pmode);
4137 emit_move_insn (value_address, sv);
4140 if (value_address)
4142 rtx x = value_address;
4143 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4145 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4146 set_mem_attributes (x, DECL_RESULT (subr), 1);
4148 SET_DECL_RTL (DECL_RESULT (subr), x);
4151 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4152 /* If return mode is void, this decl rtl should not be used. */
4153 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4154 else
4156 /* Compute the return values into a pseudo reg, which we will copy
4157 into the true return register after the cleanups are done. */
4158 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4159 if (TYPE_MODE (return_type) != BLKmode
4160 && targetm.calls.return_in_msb (return_type))
4161 /* expand_function_end will insert the appropriate padding in
4162 this case. Use the return value's natural (unpadded) mode
4163 within the function proper. */
4164 SET_DECL_RTL (DECL_RESULT (subr),
4165 gen_reg_rtx (TYPE_MODE (return_type)));
4166 else
4168 /* In order to figure out what mode to use for the pseudo, we
4169 figure out what the mode of the eventual return register will
4170 actually be, and use that. */
4171 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4173 /* Structures that are returned in registers are not
4174 aggregate_value_p, so we may see a PARALLEL or a REG. */
4175 if (REG_P (hard_reg))
4176 SET_DECL_RTL (DECL_RESULT (subr),
4177 gen_reg_rtx (GET_MODE (hard_reg)));
4178 else
4180 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4181 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4185 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4186 result to the real return register(s). */
4187 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4190 /* Initialize rtx for parameters and local variables.
4191 In some cases this requires emitting insns. */
4192 assign_parms (subr);
4194 /* If function gets a static chain arg, store it. */
4195 if (cfun->static_chain_decl)
4197 tree parm = cfun->static_chain_decl;
4198 rtx local = gen_reg_rtx (Pmode);
4200 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4201 SET_DECL_RTL (parm, local);
4202 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4204 emit_move_insn (local, static_chain_incoming_rtx);
4207 /* If the function receives a non-local goto, then store the
4208 bits we need to restore the frame pointer. */
4209 if (cfun->nonlocal_goto_save_area)
4211 tree t_save;
4212 rtx r_save;
4214 /* ??? We need to do this save early. Unfortunately, this point
4215 is before the frame variable gets declared. Help out... */
4216 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4218 t_save = build4 (ARRAY_REF, ptr_type_node,
4219 cfun->nonlocal_goto_save_area,
4220 integer_zero_node, NULL_TREE, NULL_TREE);
4221 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4222 r_save = convert_memory_address (Pmode, r_save);
4224 emit_move_insn (r_save, virtual_stack_vars_rtx);
4225 update_nonlocal_goto_save_area ();
4228 /* The following was moved from init_function_start.
4229 The move is supposed to make sdb output more accurate. */
4230 /* Indicate the beginning of the function body,
4231 as opposed to parm setup. */
4232 emit_note (NOTE_INSN_FUNCTION_BEG);
4234 gcc_assert (NOTE_P (get_last_insn ()));
4236 parm_birth_insn = get_last_insn ();
4238 if (current_function_profile)
4240 #ifdef PROFILE_HOOK
4241 PROFILE_HOOK (current_function_funcdef_no);
4242 #endif
4245 /* The stack checking probe should go after the display
4246 initializations. */
4247 if (flag_stack_check)
4248 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4250 /* Make sure there is a line number after the function entry setup code. */
4251 force_next_line_note ();
4254 /* Undo the effects of init_dummy_function_start. */
4255 void
4256 expand_dummy_function_end (void)
4258 /* End any sequences that failed to be closed due to syntax errors. */
4259 while (in_sequence_p ())
4260 end_sequence ();
4262 /* Outside function body, can't compute type's actual size
4263 until next function's body starts. */
4265 free_after_parsing (cfun);
4266 free_after_compilation (cfun);
4267 cfun = 0;
4270 /* Call DOIT for each hard register used as a return value from
4271 the current function. */
4273 void
4274 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4276 rtx outgoing = current_function_return_rtx;
4278 if (! outgoing)
4279 return;
4281 if (REG_P (outgoing))
4282 (*doit) (outgoing, arg);
4283 else if (GET_CODE (outgoing) == PARALLEL)
4285 int i;
4287 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4289 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4291 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4292 (*doit) (x, arg);
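/* diddle_return_value is a small callback-iteration helper: the return
   value is either a single hard register or a PARALLEL group, and DOIT
   is applied to each hard register either way.  The same shape on toy
   types (not GCC's rtx):  */
#include <stdio.h>

struct ret_regs { int n; int regno[4]; };

static void
for_each_return_reg (const struct ret_regs *rv,
                     void (*doit) (int, void *), void *arg)
{
  int i;
  for (i = 0; i < rv->n; i++)   /* one reg or a group: same loop */
    doit (rv->regno[i], arg);
}

static void
show (int regno, void *arg)
{
  (void) arg;
  printf ("return value uses hard reg %d\n", regno);
}

int
main (void)
{
  struct ret_regs rv = { 2, { 0, 1 } };  /* e.g. a value split over two regs */
  for_each_return_reg (&rv, show, NULL);
  return 0;
}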
4297 static void
4298 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4300 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4303 void
4304 clobber_return_register (void)
4306 diddle_return_value (do_clobber_return_reg, NULL);
4308 /* If we used a pseudo to hold the return value, clobber it too. */
4309 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4311 tree decl_result = DECL_RESULT (current_function_decl);
4312 rtx decl_rtl = DECL_RTL (decl_result);
4313 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4315 do_clobber_return_reg (decl_rtl, NULL);
4320 static void
4321 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4323 emit_insn (gen_rtx_USE (VOIDmode, reg));
4326 static void
4327 use_return_register (void)
4329 diddle_return_value (do_use_return_reg, NULL);
4332 /* Possibly warn about unused parameters. */
4333 void
4334 do_warn_unused_parameter (tree fn)
4336 tree decl;
4338 for (decl = DECL_ARGUMENTS (fn);
4339 decl; decl = TREE_CHAIN (decl))
4340 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4341 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4342 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4345 static GTY(()) rtx initial_trampoline;
4347 /* Generate RTL for the end of the current function. */
4349 void
4350 expand_function_end (void)
4352 rtx clobber_after;
4354 /* If arg_pointer_save_area was referenced only from a nested
4355 function, we will not have initialized it yet. Do that now. */
4356 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4357 get_arg_pointer_save_area (cfun);
4359 /* If we are doing stack checking and this function makes calls,
4360 do a stack probe at the start of the function to ensure we have enough
4361 space for another stack frame. */
4362 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4364 rtx insn, seq;
4366 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4367 if (CALL_P (insn))
4369 start_sequence ();
4370 probe_stack_range (STACK_CHECK_PROTECT,
4371 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4372 seq = get_insns ();
4373 end_sequence ();
4374 emit_insn_before (seq, stack_check_probe_note);
4375 break;
4379 /* Possibly warn about unused parameters.
4380 When the front end does unit-at-a-time, the warning is already
4381 issued at finalization time. */
4382 if (warn_unused_parameter
4383 && !lang_hooks.callgraph.expand_function)
4384 do_warn_unused_parameter (current_function_decl);
4386 /* End any sequences that failed to be closed due to syntax errors. */
4387 while (in_sequence_p ())
4388 end_sequence ();
4390 clear_pending_stack_adjust ();
4391 do_pending_stack_adjust ();
4393 /* Mark the end of the function body.
4394 If control reaches this insn, the function can drop through
4395 without returning a value. */
4396 emit_note (NOTE_INSN_FUNCTION_END);
4398 /* Must mark the last line number note in the function, so that the test
4399 coverage code can avoid counting the last line twice. This just tells
4400 the code to ignore the immediately following line note, since there
4401 already exists a copy of this note somewhere above. This line number
4402 note is still needed for debugging though, so we can't delete it. */
4403 if (flag_test_coverage)
4404 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4406 /* Output a line number for the end of the function.
4407 SDB depends on this. */
4408 force_next_line_note ();
4409 emit_line_note (input_location);
4411 /* Before the return label (if any), clobber the return
4412 registers so that they are not propagated live to the rest of
4413 the function. This can only happen with functions that drop
4414 through; if there had been a return statement, there would
4415 have either been a return rtx, or a jump to the return label.
4417 We delay actual code generation until after the
4418 current_function_value_rtx is computed. */
4419 clobber_after = get_last_insn ();
4421 /* Output the label for the actual return from the function. */
4422 emit_label (return_label);
4424 if (USING_SJLJ_EXCEPTIONS)
4426 /* Let except.c know where it should emit the call to unregister
4427 the function context for sjlj exceptions. */
4428 if (flag_exceptions)
4429 sjlj_emit_function_exit_after (get_last_insn ());
4431 else
4433 /* @@@ This is a kludge. We want to ensure that instructions that
4434 may trap are not moved into the epilogue by scheduling, because
4435 we don't always emit unwind information for the epilogue.
4436 However, not all machine descriptions define a blockage insn, so
4437 emit an ASM_INPUT to act as one. */
4438 if (flag_non_call_exceptions)
4439 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4442 /* If this is an implementation of throw, do what's necessary to
4443 communicate between __builtin_eh_return and the epilogue. */
4444 expand_eh_return ();
4446 /* If scalar return value was computed in a pseudo-reg, or was a named
4447 return value that got dumped to the stack, copy that to the hard
4448 return register. */
4449 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4451 tree decl_result = DECL_RESULT (current_function_decl);
4452 rtx decl_rtl = DECL_RTL (decl_result);
4454 if (REG_P (decl_rtl)
4455 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4456 : DECL_REGISTER (decl_result))
4458 rtx real_decl_rtl = current_function_return_rtx;
4460 /* This should be set in assign_parms. */
4461 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4463 /* If this is a BLKmode structure being returned in registers,
4464 then use the mode computed in expand_return. Note that if
4465 decl_rtl is memory, then its mode may have been changed,
4466 but that of current_function_return_rtx has not. */
4467 if (GET_MODE (real_decl_rtl) == BLKmode)
4468 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4470 /* If a non-BLKmode return value should be padded at the least
4471 significant end of the register, shift it left by the appropriate
4472 amount. BLKmode results are handled using the group load/store
4473 machinery. */
4474 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4475 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4477 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4478 REGNO (real_decl_rtl)),
4479 decl_rtl);
4480 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4482 /* If a named return value dumped decl_result to memory, then
4483 we may need to re-do the PROMOTE_MODE signed/unsigned
4484 extension. */
4485 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4487 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4489 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4490 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4491 &unsignedp, 1);
4493 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4495 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4497 /* If expand_function_start has created a PARALLEL for decl_rtl,
4498 move the result to the real return registers. Otherwise, do
4499 a group load from decl_rtl for a named return. */
4500 if (GET_CODE (decl_rtl) == PARALLEL)
4501 emit_group_move (real_decl_rtl, decl_rtl);
4502 else
4503 emit_group_load (real_decl_rtl, decl_rtl,
4504 TREE_TYPE (decl_result),
4505 int_size_in_bytes (TREE_TYPE (decl_result)));
4507 /* In the case of complex integer modes smaller than a word, we'll
4508 need to generate some non-trivial bitfield insertions. Do that
4509 on a pseudo and not the hard register. */
4510 else if (GET_CODE (decl_rtl) == CONCAT
4511 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4512 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4514 int old_generating_concat_p;
4515 rtx tmp;
4517 old_generating_concat_p = generating_concat_p;
4518 generating_concat_p = 0;
4519 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4520 generating_concat_p = old_generating_concat_p;
4522 emit_move_insn (tmp, decl_rtl);
4523 emit_move_insn (real_decl_rtl, tmp);
4525 else
4526 emit_move_insn (real_decl_rtl, decl_rtl);
4530 /* If returning a structure, arrange to return the address of the value
4531 in a place where debuggers expect to find it.
4533 If returning a structure PCC style,
4534 the caller also depends on this value.
4535 And current_function_returns_pcc_struct is not necessarily set. */
4536 if (current_function_returns_struct
4537 || current_function_returns_pcc_struct)
4539 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4540 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4541 rtx outgoing;
4543 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4544 type = TREE_TYPE (type);
4545 else
4546 value_address = XEXP (value_address, 0);
4548 outgoing = targetm.calls.function_value (build_pointer_type (type),
4549 current_function_decl, true);
4551 /* Mark this as a function return value so integrate will delete the
4552 assignment and USE below when inlining this function. */
4553 REG_FUNCTION_VALUE_P (outgoing) = 1;
4555 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4556 value_address = convert_memory_address (GET_MODE (outgoing),
4557 value_address);
4559 emit_move_insn (outgoing, value_address);
4561 /* Show the return register used to hold the result (in this case
4562 the address of the result). */
4563 current_function_return_rtx = outgoing;
4566 /* Emit the actual code to clobber return register. */
4568 rtx seq;
4570 start_sequence ();
4571 clobber_return_register ();
4572 expand_naked_return ();
4573 seq = get_insns ();
4574 end_sequence ();
4576 emit_insn_after (seq, clobber_after);
4579 /* Output the label for the naked return from the function. */
4580 emit_label (naked_return_label);
4582 /* If stack protection is enabled for this function, check the guard. */
4583 if (cfun->stack_protect_guard)
4584 stack_protect_epilogue ();
4586 /* If we had calls to alloca, and this machine needs
4587 an accurate stack pointer to exit the function,
4588 insert some code to save and restore the stack pointer. */
4589 if (! EXIT_IGNORE_STACK
4590 && current_function_calls_alloca)
4592 rtx tem = 0;
4594 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4595 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4598 /* ??? This should no longer be necessary since stupid is no longer with
4599 us, but there are some parts of the compiler (eg reload_combine, and
4600 sh mach_dep_reorg) that still try and compute their own lifetime info
4601 instead of using the general framework. */
4602 use_return_register ();
4605 rtx
4606 get_arg_pointer_save_area (struct function *f)
4608 rtx ret = f->x_arg_pointer_save_area;
4610 if (! ret)
4612 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4613 f->x_arg_pointer_save_area = ret;
4616 if (f == cfun && ! f->arg_pointer_save_area_init)
4618 rtx seq;
4620 /* Save the arg pointer at the beginning of the function. The
4621 generated stack slot may not be a valid memory address, so we
4622 have to check it and fix it if necessary. */
4623 start_sequence ();
4624 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4625 seq = get_insns ();
4626 end_sequence ();
4628 push_topmost_sequence ();
4629 emit_insn_after (seq, entry_of_function ());
4630 pop_topmost_sequence ();
4633 return ret;
4636 /* Extend a vector that records the INSN_UIDs of INSNS
4637 (a list of one or more insns). */
4639 static void
4640 record_insns (rtx insns, VEC(int,heap) **vecp)
4642 rtx tmp;
4644 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4645 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4648 /* Set the locator of the insn chain starting at INSN to LOC. */
4649 static void
4650 set_insn_locators (rtx insn, int loc)
4652 while (insn != NULL_RTX)
4654 if (INSN_P (insn))
4655 INSN_LOCATOR (insn) = loc;
4656 insn = NEXT_INSN (insn);
4660 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4661 be running after reorg, SEQUENCE rtl is possible. */
4663 static int
4664 contains (rtx insn, VEC(int,heap) **vec)
4666 int i, j;
4668 if (NONJUMP_INSN_P (insn)
4669 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4671 int count = 0;
4672 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4673 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4674 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4675 == VEC_index (int, *vec, j))
4676 count++;
4677 return count;
4679 else
4681 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4682 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4683 return 1;
4685 return 0;
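/* contains () has to look inside a SEQUENCE because after reorg a
   delay-slot bundle packs several insns into one pattern.  The same
   membership test on plain integers (an insn is a UID, a bundle is an
   array of UIDs):  */
#include <stddef.h>

static int
uid_recorded (int uid, const int *vec, size_t n)
{
  size_t j;
  for (j = 0; j < n; j++)
    if (vec[j] == uid)
      return 1;
  return 0;
}

/* Count how many members of BUNDLE were recorded in VEC, mirroring the
   SEQUENCE case of contains ().  */
static int
bundle_contains (const int *bundle, size_t len, const int *vec, size_t n)
{
  size_t i;
  int count = 0;
  for (i = 0; i < len; i++)
    count += uid_recorded (bundle[i], vec, n);
  return count;
}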
4688 int
4689 prologue_epilogue_contains (rtx insn)
4691 if (contains (insn, &prologue))
4692 return 1;
4693 if (contains (insn, &epilogue))
4694 return 1;
4695 return 0;
4698 int
4699 sibcall_epilogue_contains (rtx insn)
4701 if (sibcall_epilogue)
4702 return contains (insn, &sibcall_epilogue);
4703 return 0;
4706 #ifdef HAVE_return
4707 /* Insert gen_return at the end of block BB. This also means updating
4708 block_for_insn appropriately. */
4710 static void
4711 emit_return_into_block (basic_block bb, rtx line_note)
4713 emit_jump_insn_after (gen_return (), BB_END (bb));
4714 if (line_note)
4715 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4717 #endif /* HAVE_return */
4719 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4721 /* These functions convert the epilogue into a variant that does not
4722 modify the stack pointer. This is used in cases where a function
4723 returns an object whose size is not known until it is computed.
4724 The called function leaves the object on the stack, leaves the
4725 stack depressed, and returns a pointer to the object.
4727 What we need to do is track all modifications and references to the
4728 stack pointer, deleting the modifications and changing the
4729 references to point to the location the stack pointer would have
4730 pointed to had the modifications taken place.
4732 These functions need to be portable so we need to make as few
4733 assumptions about the epilogue as we can. However, the epilogue
4734 basically contains three things: instructions to reset the stack
4735 pointer, instructions to reload registers, possibly including the
4736 frame pointer, and an instruction to return to the caller.
4738 We must be sure of what a relevant epilogue insn is doing. We also
4739 make no attempt to validate the insns we make since if they are
4740 invalid, we probably can't do anything valid. The intent is that
4741 these routines get "smarter" as more and more machines start to use
4742 them and they try operating on different epilogues.
4744 We use the following structure to track what the part of the
4745 epilogue that we've already processed has done. We keep two copies
4746 of the SP equivalence, one for use during the insn we are
4747 processing and one for use in the next insn. The difference is
4748 because one part of a PARALLEL may adjust SP and the other may use
4749 it. */
4751 struct epi_info
4753 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4754 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4755 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4756 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4757 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4758 should be set to once we no longer need
4759 its value. */
4760 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4761 for registers. */
4764 static void handle_epilogue_set (rtx, struct epi_info *);
4765 static void update_epilogue_consts (rtx, rtx, void *);
4766 static void emit_equiv_load (struct epi_info *);
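/* A worked example of the bookkeeping above.  For a hypothetical
   epilogue

       sp = fp + 16   ; deallocate the frame
       sp = sp + 8    ; pop the remainder

   keep_stack_depressed deletes both SP writes and instead composes the
   equivalence (sp_equiv_reg, sp_offset), so a later mem[sp + 4] can be
   rewritten as mem[fp + 28].  A toy model of that composition:  */
#include <stdio.h>
#include <string.h>

struct sp_equiv { char reg[8]; long offset; };  /* SP == reg + offset */

/* Note "sp = BASE + OFF": a new base resets the pair; adjusting SP
   from itself composes with the old offset, as in handle_epilogue_set.  */
static void
note_sp_set (struct sp_equiv *e, const char *base, long off)
{
  if (strcmp (base, "sp") != 0)
    {
      strcpy (e->reg, base);
      e->offset = 0;
    }
  e->offset += off;
}

int
main (void)
{
  struct sp_equiv e = { "sp", 0 };
  note_sp_set (&e, "fp", 16);                     /* sp = fp + 16 */
  note_sp_set (&e, "sp", 8);                      /* sp = sp + 8  */
  printf ("SP == %s + %ld\n", e.reg, e.offset);   /* SP == fp + 24 */
  return 0;
}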
4768 /* Modify INSN, a list of one or more insns that is part of the epilogue, to
4769 make no modifications to the stack pointer. Return the new list of insns. */
4771 static rtx
4772 keep_stack_depressed (rtx insns)
4774 int j;
4775 struct epi_info info;
4776 rtx insn, next;
4778 /* If the epilogue is just a single instruction, it must be OK as is. */
4779 if (NEXT_INSN (insns) == NULL_RTX)
4780 return insns;
4782 /* Otherwise, start a sequence, initialize the information we have, and
4783 process all the insns we were given. */
4784 start_sequence ();
4786 info.sp_equiv_reg = stack_pointer_rtx;
4787 info.sp_offset = 0;
4788 info.equiv_reg_src = 0;
4790 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4791 info.const_equiv[j] = 0;
4793 insn = insns;
4794 next = NULL_RTX;
4795 while (insn != NULL_RTX)
4797 next = NEXT_INSN (insn);
4799 if (!INSN_P (insn))
4801 add_insn (insn);
4802 insn = next;
4803 continue;
4806 /* If this insn references the register that SP is equivalent to and
4807 we have a pending load to that register, we must force out the load
4808 first and then indicate we no longer know what SP's equivalent is. */
4809 if (info.equiv_reg_src != 0
4810 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4812 emit_equiv_load (&info);
4813 info.sp_equiv_reg = 0;
4816 info.new_sp_equiv_reg = info.sp_equiv_reg;
4817 info.new_sp_offset = info.sp_offset;
4819 /* If this is a (RETURN) and the return address is on the stack,
4820 update the address and change to an indirect jump. */
4821 if (GET_CODE (PATTERN (insn)) == RETURN
4822 || (GET_CODE (PATTERN (insn)) == PARALLEL
4823 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4825 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4826 rtx base = 0;
4827 HOST_WIDE_INT offset = 0;
4828 rtx jump_insn, jump_set;
4830 /* If the return address is in a register, we can emit the insn
4831 unchanged. Otherwise, it must be a MEM and we see what the
4832 base register and offset are. In any case, we have to emit any
4833 pending load to the equivalent reg of SP, if any. */
4834 if (REG_P (retaddr))
4836 emit_equiv_load (&info);
4837 add_insn (insn);
4838 insn = next;
4839 continue;
4841 else
4843 rtx ret_ptr;
4844 gcc_assert (MEM_P (retaddr));
4846 ret_ptr = XEXP (retaddr, 0);
4848 if (REG_P (ret_ptr))
4850 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4851 offset = 0;
4853 else
4855 gcc_assert (GET_CODE (ret_ptr) == PLUS
4856 && REG_P (XEXP (ret_ptr, 0))
4857 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4858 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4859 offset = INTVAL (XEXP (ret_ptr, 1));
4863 /* If the base of the location containing the return pointer
4864 is SP, we must update it with the replacement address. Otherwise,
4865 just build the necessary MEM. */
4866 retaddr = plus_constant (base, offset);
4867 if (base == stack_pointer_rtx)
4868 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4869 plus_constant (info.sp_equiv_reg,
4870 info.sp_offset));
4872 retaddr = gen_rtx_MEM (Pmode, retaddr);
4873 MEM_NOTRAP_P (retaddr) = 1;
4875 /* If there is a pending load to the equivalent register for SP
4876 and we reference that register, we must load our address into
4877 a scratch register and then do that load. */
4878 if (info.equiv_reg_src
4879 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4881 unsigned int regno;
4882 rtx reg;
4884 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4885 if (HARD_REGNO_MODE_OK (regno, Pmode)
4886 && !fixed_regs[regno]
4887 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4888 && !REGNO_REG_SET_P
4889 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4890 && !refers_to_regno_p (regno,
4891 regno + hard_regno_nregs[regno]
4892 [Pmode],
4893 info.equiv_reg_src, NULL)
4894 && info.const_equiv[regno] == 0)
4895 break;
4897 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4899 reg = gen_rtx_REG (Pmode, regno);
4900 emit_move_insn (reg, retaddr);
4901 retaddr = reg;
4904 emit_equiv_load (&info);
4905 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4907 /* Show the SET in the above insn is a RETURN. */
4908 jump_set = single_set (jump_insn);
4909 gcc_assert (jump_set);
4910 SET_IS_RETURN_P (jump_set) = 1;
4913 /* If SP is not mentioned in the pattern and its equivalent register, if
4914 any, is not modified, just emit it. Otherwise, if neither is set,
4915 replace the reference to SP and emit the insn. If none of those are
4916 true, handle each SET individually. */
4917 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4918 && (info.sp_equiv_reg == stack_pointer_rtx
4919 || !reg_set_p (info.sp_equiv_reg, insn)))
4920 add_insn (insn);
4921 else if (! reg_set_p (stack_pointer_rtx, insn)
4922 && (info.sp_equiv_reg == stack_pointer_rtx
4923 || !reg_set_p (info.sp_equiv_reg, insn)))
4925 int changed;
4927 changed = validate_replace_rtx (stack_pointer_rtx,
4928 plus_constant (info.sp_equiv_reg,
4929 info.sp_offset),
4930 insn);
4931 gcc_assert (changed);
4933 add_insn (insn);
4935 else if (GET_CODE (PATTERN (insn)) == SET)
4936 handle_epilogue_set (PATTERN (insn), &info);
4937 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4939 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4940 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4941 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4943 else
4944 add_insn (insn);
4946 info.sp_equiv_reg = info.new_sp_equiv_reg;
4947 info.sp_offset = info.new_sp_offset;
4949 /* Now update any constants this insn sets. */
4950 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4951 insn = next;
4954 insns = get_insns ();
4955 end_sequence ();
4956 return insns;
4959 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4960 structure that contains information about what we've seen so far. We
4961 process this SET by either updating that data or by emitting one or
4962 more insns. */
4964 static void
4965 handle_epilogue_set (rtx set, struct epi_info *p)
4967 /* First handle the case where we are setting SP. Record what it is being
4968 set from, which we must be able to determine. */
4969 if (reg_set_p (stack_pointer_rtx, set))
4971 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4973 if (GET_CODE (SET_SRC (set)) == PLUS)
4975 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4976 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4977 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4978 else
4980 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4981 && (REGNO (XEXP (SET_SRC (set), 1))
4982 < FIRST_PSEUDO_REGISTER)
4983 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4984 p->new_sp_offset
4985 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4988 else
4989 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4991 /* If we are adjusting SP, we adjust from the old data. */
4992 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4994 p->new_sp_equiv_reg = p->sp_equiv_reg;
4995 p->new_sp_offset += p->sp_offset;
4998 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
5000 return;
5003 /* Next handle the case where we are setting SP's equivalent
5004 register. We must not already have a value to set it to. We
5005 could update, but there seems little point in handling that case.
5006 Note that we have to allow for the case where we are setting the
5007 register set in the previous part of a PARALLEL inside a single
5008 insn. But use the old offset for any updates within this insn.
5009 We must allow for the case where the register is being set in a
5010 different (usually wider) mode than Pmode. */
5011 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
5013 gcc_assert (!p->equiv_reg_src
5014 && REG_P (p->new_sp_equiv_reg)
5015 && REG_P (SET_DEST (set))
5016 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5017 <= BITS_PER_WORD)
5018 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
5019 p->equiv_reg_src
5020 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5021 plus_constant (p->sp_equiv_reg,
5022 p->sp_offset));
5025 /* Otherwise, replace any references to SP in the insn to its new value
5026 and emit the insn. */
5027 else
5029 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5030 plus_constant (p->sp_equiv_reg,
5031 p->sp_offset));
5032 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5033 plus_constant (p->sp_equiv_reg,
5034 p->sp_offset));
5035 emit_insn (set);
5039 /* Update the tracking information for registers set to constants. */
5041 static void
5042 update_epilogue_consts (rtx dest, rtx x, void *data)
5044 struct epi_info *p = (struct epi_info *) data;
5045 rtx new;
5047 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5048 return;
5050 /* If we are either clobbering a register or doing a partial set,
5051 show we don't know the value. */
5052 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5053 p->const_equiv[REGNO (dest)] = 0;
5055 /* If we are setting it to a constant, record that constant. */
5056 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5057 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5059 /* If this is a binary operation between a register we have been tracking
5060 and a constant, see if we can compute a new constant value. */
5061 else if (ARITHMETIC_P (SET_SRC (x))
5062 && REG_P (XEXP (SET_SRC (x), 0))
5063 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5064 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5065 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5066 && 0 != (new = simplify_binary_operation
5067 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5068 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5069 XEXP (SET_SRC (x), 1)))
5070 && GET_CODE (new) == CONST_INT)
5071 p->const_equiv[REGNO (dest)] = new;
5073 /* Otherwise, we can't do anything with this value. */
5074 else
5075 p->const_equiv[REGNO (dest)] = 0;
5078 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5080 static void
5081 emit_equiv_load (struct epi_info *p)
5083 if (p->equiv_reg_src != 0)
5085 rtx dest = p->sp_equiv_reg;
5087 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5088 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5089 REGNO (p->sp_equiv_reg));
5091 emit_move_insn (dest, p->equiv_reg_src);
5092 p->equiv_reg_src = 0;
5095 #endif
5097 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5098 this into place with notes indicating where the prologue ends and where
5099 the epilogue begins. Update the basic block information when possible. */
5101 void
5102 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5104 int inserted = 0;
5105 edge e;
5106 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5107 rtx seq;
5108 #endif
5109 #ifdef HAVE_prologue
5110 rtx prologue_end = NULL_RTX;
5111 #endif
5112 #if defined (HAVE_epilogue) || defined(HAVE_return)
5113 rtx epilogue_end = NULL_RTX;
5114 #endif
5115 edge_iterator ei;
5117 #ifdef HAVE_prologue
5118 if (HAVE_prologue)
5120 start_sequence ();
5121 seq = gen_prologue ();
5122 emit_insn (seq);
5124 /* Retain a map of the prologue insns. */
5125 record_insns (seq, &prologue);
5126 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5128 seq = get_insns ();
5129 end_sequence ();
5130 set_insn_locators (seq, prologue_locator);
5132 /* Can't deal with multiple successors of the entry block
5133 at the moment. Function should always have at least one
5134 entry point. */
5135 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5137 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5138 inserted = 1;
5140 #endif
5142 /* If the exit block has no non-fake predecessors, we don't need
5143 an epilogue. */
5144 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5145 if ((e->flags & EDGE_FAKE) == 0)
5146 break;
5147 if (e == NULL)
5148 goto epilogue_done;
5150 #ifdef HAVE_return
5151 if (optimize && HAVE_return)
5153 /* If we're allowed to generate a simple return instruction,
5154 then by definition we don't need a full epilogue. Examine
5155 the block that falls through to EXIT. If it does not
5156 contain any code, examine its predecessors and try to
5157 emit (conditional) return instructions. */
5159 basic_block last;
5160 rtx label;
5162 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5163 if (e->flags & EDGE_FALLTHRU)
5164 break;
5165 if (e == NULL)
5166 goto epilogue_done;
5167 last = e->src;
5169 /* Verify that there are no active instructions in the last block. */
5170 label = BB_END (last);
5171 while (label && !LABEL_P (label))
5173 if (active_insn_p (label))
5174 break;
5175 label = PREV_INSN (label);
5178 if (BB_HEAD (last) == label && LABEL_P (label))
5180 edge_iterator ei2;
5181 rtx epilogue_line_note = NULL_RTX;
5183 /* Locate the line number associated with the closing brace,
5184 if we can find one. */
5185 for (seq = get_last_insn ();
5186 seq && ! active_insn_p (seq);
5187 seq = PREV_INSN (seq))
5188 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5190 epilogue_line_note = seq;
5191 break;
5194 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5196 basic_block bb = e->src;
5197 rtx jump;
5199 if (bb == ENTRY_BLOCK_PTR)
5201 ei_next (&ei2);
5202 continue;
5205 jump = BB_END (bb);
5206 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5208 ei_next (&ei2);
5209 continue;
5212 /* If we have an unconditional jump, we can replace that
5213 with a simple return instruction. */
5214 if (simplejump_p (jump))
5216 emit_return_into_block (bb, epilogue_line_note);
5217 delete_insn (jump);
5220 /* If we have a conditional jump, we can try to replace
5221 that with a conditional return instruction. */
5222 else if (condjump_p (jump))
5224 if (! redirect_jump (jump, 0, 0))
5226 ei_next (&ei2);
5227 continue;
5230 /* If this block has only one successor, it both jumps
5231 and falls through to the fallthru block, so we can't
5232 delete the edge. */
5233 if (single_succ_p (bb))
5235 ei_next (&ei2);
5236 continue;
5239 else
5241 ei_next (&ei2);
5242 continue;
5245 /* Fix up the CFG for the successful change we just made. */
5246 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5249 /* Emit a return insn for the exit fallthru block. Whether
5250 this is still reachable will be determined later. */
5252 emit_barrier_after (BB_END (last));
5253 emit_return_into_block (last, epilogue_line_note);
5254 epilogue_end = BB_END (last);
5255 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5256 goto epilogue_done;
5259 #endif
5260 /* Find the edge that falls through to EXIT. Other edges may exist
5261 due to RETURN instructions, but those don't need epilogues.
5262 There really shouldn't be a mixture -- either all should have
5263 been converted or none, however... */
5265 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5266 if (e->flags & EDGE_FALLTHRU)
5267 break;
5268 if (e == NULL)
5269 goto epilogue_done;
5271 #ifdef HAVE_epilogue
5272 if (HAVE_epilogue)
5274 start_sequence ();
5275 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5277 seq = gen_epilogue ();
5279 #ifdef INCOMING_RETURN_ADDR_RTX
5280 /* If this function returns with the stack depressed and we can support
5281 it, massage the epilogue to actually do that. */
5282 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5283 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5284 seq = keep_stack_depressed (seq);
5285 #endif
5287 emit_jump_insn (seq);
5289 /* Retain a map of the epilogue insns. */
5290 record_insns (seq, &epilogue);
5291 set_insn_locators (seq, epilogue_locator);
5293 seq = get_insns ();
5294 end_sequence ();
5296 insert_insn_on_edge (seq, e);
5297 inserted = 1;
5299 else
5300 #endif
5302 basic_block cur_bb;
5304 if (! next_active_insn (BB_END (e->src)))
5305 goto epilogue_done;
5306 /* We have a fall-through edge to the exit block, the source is not
5307 at the end of the function, and there will be an assembler epilogue
5308 at the end of the function.
5309 We can't use force_nonfallthru here, because that would try to
5310 use return. Inserting a jump 'by hand' is extremely messy, so
5311 we take advantage of cfg_layout_finalize using
5312 fixup_fallthru_exit_predecessor. */
5313 cfg_layout_initialize (0);
5314 FOR_EACH_BB (cur_bb)
5315 if (cur_bb->index >= NUM_FIXED_BLOCKS
5316 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5317 cur_bb->aux = cur_bb->next_bb;
5318 cfg_layout_finalize ();
5320 epilogue_done:
5322 if (inserted)
5323 commit_edge_insertions ();
5325 #ifdef HAVE_sibcall_epilogue
5326 /* Emit sibling epilogues before any sibling call sites. */
5327 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5329 basic_block bb = e->src;
5330 rtx insn = BB_END (bb);
5332 if (!CALL_P (insn)
5333 || ! SIBLING_CALL_P (insn))
5335 ei_next (&ei);
5336 continue;
5339 start_sequence ();
5340 emit_insn (gen_sibcall_epilogue ());
5341 seq = get_insns ();
5342 end_sequence ();
5344 /* Retain a map of the epilogue insns. Used in life analysis to
5345 avoid getting rid of sibcall epilogue insns. Do this before we
5346 actually emit the sequence. */
5347 record_insns (seq, &sibcall_epilogue);
5348 set_insn_locators (seq, epilogue_locator);
5350 emit_insn_before (seq, insn);
5351 ei_next (&ei);
5353 #endif
5355 #ifdef HAVE_prologue
5356 /* This is probably all useless now that we use locators. */
5357 if (prologue_end)
5359 rtx insn, prev;
5361 /* GDB handles `break f' by setting a breakpoint on the first
5362 line note after the prologue. Which means (1) that if
5363 there are line number notes before where we inserted the
5364 prologue we should move them, and (2) we should generate a
5365 note before the end of the first basic block, if there isn't
5366 one already there.
5368 ??? This behavior is completely broken when dealing with
5369 multiple entry functions. We simply always place the note
5370 into the first basic block and let alternate entry points
5371 be missed. */
5374 for (insn = prologue_end; insn; insn = prev)
5376 prev = PREV_INSN (insn);
5377 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5379 /* Note that we cannot reorder the first insn in the
5380 chain, since rest_of_compilation relies on that
5381 remaining constant. */
5382 if (prev == NULL)
5383 break;
5384 reorder_insns (insn, insn, prologue_end);
5388 /* Find the last line number note in the first block. */
5389 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5390 insn != prologue_end && insn;
5391 insn = PREV_INSN (insn))
5392 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5393 break;
5395 /* If we didn't find one, make a copy of the first line number
5396 we run across. */
5397 if (! insn)
5399 for (insn = next_active_insn (prologue_end);
5400 insn;
5401 insn = PREV_INSN (insn))
5402 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5404 emit_note_copy_after (insn, prologue_end);
5405 break;
5409 #endif
5410 #ifdef HAVE_epilogue
5411 if (epilogue_end)
5413 rtx insn, next;
5415 /* Similarly, move any line notes that appear after the epilogue.
5416 There is no need, however, to be quite so anal about the existence
5417 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5418 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5419 info generation. */
5420 for (insn = epilogue_end; insn; insn = next)
5422 next = NEXT_INSN (insn);
5423 if (NOTE_P (insn)
5424 && (NOTE_LINE_NUMBER (insn) > 0
5425 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5426 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5427 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5430 #endif
5433 /* Reposition the prologue-end and epilogue-begin notes after instruction
5434 scheduling and delayed branch scheduling. */
5436 void
5437 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5439 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5440 rtx insn, last, note;
5441 int len;
5443 if ((len = VEC_length (int, prologue)) > 0)
5445 last = 0, note = 0;
5447 /* Scan from the beginning until we reach the last prologue insn.
5448 We apparently can't depend on basic_block_{head,end} after
5449 reorg has run. */
5450 for (insn = f; insn; insn = NEXT_INSN (insn))
5452 if (NOTE_P (insn))
5454 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5455 note = insn;
5457 else if (contains (insn, &prologue))
5459 last = insn;
5460 if (--len == 0)
5461 break;
5465 if (last)
5467 /* Find the prologue-end note if we haven't already, and
5468 move it to just after the last prologue insn. */
5469 if (note == 0)
5471 for (note = last; (note = NEXT_INSN (note));)
5472 if (NOTE_P (note)
5473 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5474 break;
5477 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5478 if (LABEL_P (last))
5479 last = NEXT_INSN (last);
5480 reorder_insns (note, note, last);
5484 if ((len = VEC_length (int, epilogue)) > 0)
5486 last = 0, note = 0;
5488 /* Scan from the end until we reach the first epilogue insn.
5489 We apparently can't depend on basic_block_{head,end} after
5490 reorg has run. */
5491 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5493 if (NOTE_P (insn))
5495 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5496 note = insn;
5498 else if (contains (insn, &epilogue))
5500 last = insn;
5501 if (--len == 0)
5502 break;
5506 if (last)
5508 /* Find the epilogue-begin note if we haven't already, and
5509 move it to just before the first epilogue insn. */
5510 if (note == 0)
5512 for (note = insn; (note = PREV_INSN (note));)
5513 if (NOTE_P (note)
5514 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5515 break;
5518 if (PREV_INSN (last) != note)
5519 reorder_insns (note, note, PREV_INSN (last));
5522 #endif /* HAVE_prologue or HAVE_epilogue */
5525 /* Resets the ib_boundaries_block array. */
5527 void
5528 reset_block_changes (void)
5530 cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
5531 VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
5534 /* Record the boundary for BLOCK. */
5535 void
5536 record_block_change (tree block)
5538 int i, n;
5539 tree last_block;
5541 if (!block)
5542 return;
5544 if (!cfun->ib_boundaries_block)
5545 return;
5547 last_block = VEC_pop (tree, cfun->ib_boundaries_block);
5548 n = get_max_uid ();
5549 for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
5550 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
5552 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
5555 /* Finishes recording the boundaries. */
5556 void finalize_block_changes (void)
5558 record_block_change (DECL_INITIAL (current_function_decl));
5561 /* For INSN return the BLOCK it belongs to. */
5562 void
5563 check_block_change (rtx insn, tree *block)
5565 unsigned uid = INSN_UID (insn);
5567 if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
5568 return;
5570 *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
5573 /* Releases the ib_boundaries_block records. */
5574 void
5575 free_block_changes (void)
5577 VEC_free (tree, gc, cfun->ib_boundaries_block);
5580 /* Returns the name of the current function. */
5581 const char *
5582 current_function_name (void)
5584 return lang_hooks.decl_printable_name (cfun->decl, 2);
5588 static unsigned int
5589 rest_of_handle_check_leaf_regs (void)
5591 #ifdef LEAF_REGISTERS
5592 current_function_uses_only_leaf_regs
5593 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5594 #endif
5595 return 0;
5598 /* Insert a TYPE into the used types hash table of CFUN. */
5599 static void
5600 used_types_insert_helper (tree type, struct function *func)
5602 if (type != NULL && func != NULL)
5604 void **slot;
5606 if (func->used_types_hash == NULL)
5607 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5608 htab_eq_pointer, NULL);
5609 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5610 if (*slot == NULL)
5611 *slot = type;
5615 /* Given a type, insert it into the used hash table in cfun. */
5616 void
5617 used_types_insert (tree t)
5619 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5620 t = TREE_TYPE (t);
5621 t = TYPE_MAIN_VARIANT (t);
5622 if (debug_info_level > DINFO_LEVEL_NONE)
5623 used_types_insert_helper (t, cfun);
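/* htab_find_slot (..., INSERT) above returns the address of the bucket
   for TYPE, creating it if necessary; storing through it only when it
   is empty turns the table into a set.  A fixed-size sketch of the
   same insert-if-absent shape using pointer hashing (the real htab
   grows and rehashes; this toy table assumes it never fills up):  */
#include <stddef.h>
#include <stdint.h>

#define NSLOTS 64
static const void *slots[NSLOTS];       /* toy pointer set */

static const void **
find_slot (const void *key)
{
  size_t i = ((uintptr_t) key >> 3) % NSLOTS;
  while (slots[i] != NULL && slots[i] != key)
    i = (i + 1) % NSLOTS;               /* linear probing */
  return &slots[i];
}

static void
set_insert (const void *key)
{
  const void **slot = find_slot (key);
  if (*slot == NULL)                    /* only insert if absent */
    *slot = key;
}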
5626 struct tree_opt_pass pass_leaf_regs =
5628 NULL, /* name */
5629 NULL, /* gate */
5630 rest_of_handle_check_leaf_regs, /* execute */
5631 NULL, /* sub */
5632 NULL, /* next */
5633 0, /* static_pass_number */
5634 0, /* tv_id */
5635 0, /* properties_required */
5636 0, /* properties_provided */
5637 0, /* properties_destroyed */
5638 0, /* todo_flags_start */
5639 0, /* todo_flags_finish */
5640 0 /* letter */
5644 #include "gt-function.h"