gcc/function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "libfuncs.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "basic-block.h"
55 #include "toplev.h"
56 #include "hashtab.h"
57 #include "ggc.h"
58 #include "tm_p.h"
59 #include "integrate.h"
60 #include "langhooks.h"
61 #include "target.h"
62 #include "cfglayout.h"
63 #include "tree-gimple.h"
64 #include "tree-pass.h"
65 #include "predict.h"
67 #ifndef LOCAL_ALIGNMENT
68 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
69 #endif
71 #ifndef STACK_ALIGNMENT_NEEDED
72 #define STACK_ALIGNMENT_NEEDED 1
73 #endif
75 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
77 /* Some systems use __main in a way incompatible with its use in gcc; in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
85 /* Round a value down to the largest multiple of the required alignment
86 that is not greater than it.  Avoid using division in case the value is
87 negative.  Assume the alignment is a power of two. */
88 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
90 /* Similar, but round up to the next multiple of the alignment. */
92 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
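/* Illustrative evaluations (editorial sketch, not part of the original
   source), assuming two's-complement arithmetic and a power-of-two ALIGN
   as stated above:
     FLOOR_ROUND (13, 8)  ==   8     CEIL_ROUND (13, 8)  ==  16
     FLOOR_ROUND (16, 8)  ==  16     CEIL_ROUND (16, 8)  ==  16
     FLOOR_ROUND (-13, 8) == -16     CEIL_ROUND (-13, 8) ==  -8  */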
94 /* Nonzero if function being compiled doesn't contain any calls
95 (ignoring the prologue and epilogue). This is set prior to
96 local register allocation and is valid for the remaining
97 compiler passes. */
98 int current_function_is_leaf;
100 /* Nonzero if function being compiled doesn't modify the stack pointer
101 (ignoring the prologue and epilogue). This is only valid after
102 life_analysis has run. */
103 int current_function_sp_is_unchanging;
105 /* Nonzero if the function being compiled is a leaf function which only
106 uses leaf registers. This is valid after reload (specifically after
107 sched2) and is useful only if the port defines LEAF_REGISTERS. */
108 int current_function_uses_only_leaf_regs;
110 /* Nonzero once virtual register instantiation has been done.
111 assign_stack_local uses frame_pointer_rtx when this is nonzero.
112 calls.c:emit_library_call_value_1 uses it to set up
113 post-instantiation libcalls. */
114 int virtuals_instantiated;
116 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
117 static GTY(()) int funcdef_no;
119 /* These variables hold pointers to functions to create and destroy
120 target specific, per-function data structures. */
121 struct machine_function * (*init_machine_status) (void);
123 /* The currently compiled function. */
124 struct function *cfun = 0;
126 DEF_VEC_I(int);
127 DEF_VEC_ALLOC_I(int,heap);
129 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
130 static VEC(int,heap) *prologue;
131 static VEC(int,heap) *epilogue;
133 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
134 in this function. */
135 static VEC(int,heap) *sibcall_epilogue;
137 /* In order to evaluate some expressions, such as function calls returning
138 structures in memory, we need to temporarily allocate stack locations.
139 We record each allocated temporary in the following structure.
141 Associated with each temporary slot is a nesting level. When we pop up
142 one level, all temporaries associated with the previous level are freed.
143 Normally, all temporaries are freed after the execution of the statement
144 in which they were created. However, if we are inside a ({...}) grouping,
145 the result may be in a temporary and hence must be preserved. If the
146 result could be in a temporary, we preserve it if we can determine which
147 one it is in. If we cannot determine which temporary may contain the
148 result, all temporaries are preserved. A temporary is preserved by
149 pretending it was allocated at the previous nesting level.
151 Automatic variables are also assigned temporary slots, at the nesting
152 level where they are defined.  They are marked as "kept" so that
153 free_temp_slots will not free them. */
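/* Illustrative call sequence (editorial sketch, not part of the original
   source): code expanding a single statement typically wraps it as

       push_temp_slots ();
       ... expand the statement, allocating temporaries as needed ...
       preserve_temp_slots (result);   -- only if RESULT may live in a temp
       free_temp_slots ();
       pop_temp_slots ();

   so temporaries die at the end of the statement unless preserved into
   the enclosing nesting level.  */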
155 struct temp_slot GTY(())
157 /* Points to next temporary slot. */
158 struct temp_slot *next;
159 /* Points to previous temporary slot. */
160 struct temp_slot *prev;
162 /* The rtx used to reference the slot. */
163 rtx slot;
164 /* The rtx used to represent the address if not the address of the
165 slot above. May be an EXPR_LIST if multiple addresses exist. */
166 rtx address;
167 /* The alignment (in bits) of the slot. */
168 unsigned int align;
169 /* The size, in units, of the slot. */
170 HOST_WIDE_INT size;
171 /* The type of the object in the slot, or zero if it doesn't correspond
172 to a type. We use this to determine whether a slot can be reused.
173 It can be reused if objects of the type of the new slot will always
174 conflict with objects of the type of the old slot. */
175 tree type;
176 /* Nonzero if this temporary is currently in use. */
177 char in_use;
178 /* Nonzero if this temporary has its address taken. */
179 char addr_taken;
180 /* Nesting level at which this slot is being used. */
181 int level;
182 /* Nonzero if this should survive a call to free_temp_slots. */
183 int keep;
184 /* The offset of the slot from the frame_pointer, including extra space
185 for alignment. This info is for combine_temp_slots. */
186 HOST_WIDE_INT base_offset;
187 /* The size of the slot, including extra space for alignment. This
188 info is for combine_temp_slots. */
189 HOST_WIDE_INT full_size;
192 /* Forward declarations. */
194 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
195 struct function *);
196 static struct temp_slot *find_temp_slot_from_address (rtx);
197 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
198 static void pad_below (struct args_size *, enum machine_mode, tree);
199 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
200 static void reorder_fix_fragments (tree);
201 static int all_blocks (tree, tree *);
202 static tree *get_block_vector (tree, int *);
203 extern tree debug_find_var_in_block_tree (tree, tree);
204 /* We always define `record_insns' even if it's not used so that we
205 can always export `prologue_epilogue_contains'. */
206 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
207 static int contains (rtx, VEC(int,heap) **);
208 #ifdef HAVE_return
209 static void emit_return_into_block (basic_block, rtx);
210 #endif
211 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
212 static rtx keep_stack_depressed (rtx);
213 #endif
214 static void prepare_function_start (tree);
215 static void do_clobber_return_reg (rtx, void *);
216 static void do_use_return_reg (rtx, void *);
217 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
219 /* Pointer to chain of `struct function' for containing functions. */
220 struct function *outer_function_chain;
222 /* Given a function decl for a containing function,
223 return the `struct function' for it. */
225 struct function *
226 find_function_data (tree decl)
228 struct function *p;
230 for (p = outer_function_chain; p; p = p->outer)
231 if (p->decl == decl)
232 return p;
234 gcc_unreachable ();
237 /* Save the current context for compilation of a nested function.
238 This is called from language-specific code. The caller should use
239 the enter_nested langhook to save any language-specific state,
240 since this function knows only about language-independent
241 variables. */
243 void
244 push_function_context_to (tree context ATTRIBUTE_UNUSED)
246 struct function *p;
248 if (cfun == 0)
249 init_dummy_function_start ();
250 p = cfun;
252 p->outer = outer_function_chain;
253 outer_function_chain = p;
255 lang_hooks.function.enter_nested (p);
257 cfun = 0;
260 void
261 push_function_context (void)
263 push_function_context_to (current_function_decl);
266 /* Restore the last saved context, at the end of a nested function.
267 This function is called from language-specific code. */
269 void
270 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
272 struct function *p = outer_function_chain;
274 cfun = p;
275 outer_function_chain = p->outer;
277 current_function_decl = p->decl;
279 lang_hooks.function.leave_nested (p);
281 /* Reset variables that have known state during rtx generation. */
282 virtuals_instantiated = 0;
283 generating_concat_p = 1;
286 void
287 pop_function_context (void)
289 pop_function_context_from (current_function_decl);
292 /* Clear out all parts of the state in F that can safely be discarded
293 after the function has been parsed, but not compiled, to let
294 garbage collection reclaim the memory. */
296 void
297 free_after_parsing (struct function *f)
299 /* f->expr->forced_labels is used by code generation. */
300 /* f->emit->regno_reg_rtx is used by code generation. */
301 /* f->varasm is used by code generation. */
302 /* f->eh->eh_return_stub_label is used by code generation. */
304 lang_hooks.function.final (f);
307 /* Clear out all parts of the state in F that can safely be discarded
308 after the function has been compiled, to let garbage collection
309 reclaim the memory. */
311 void
312 free_after_compilation (struct function *f)
314 VEC_free (int, heap, prologue);
315 VEC_free (int, heap, epilogue);
316 VEC_free (int, heap, sibcall_epilogue);
318 f->eh = NULL;
319 f->expr = NULL;
320 f->emit = NULL;
321 f->varasm = NULL;
322 f->machine = NULL;
323 f->cfg = NULL;
325 f->x_avail_temp_slots = NULL;
326 f->x_used_temp_slots = NULL;
327 f->arg_offset_rtx = NULL;
328 f->return_rtx = NULL;
329 f->internal_arg_pointer = NULL;
330 f->x_nonlocal_goto_handler_labels = NULL;
331 f->x_return_label = NULL;
332 f->x_naked_return_label = NULL;
333 f->x_stack_slot_list = NULL;
334 f->x_tail_recursion_reentry = NULL;
335 f->x_arg_pointer_save_area = NULL;
336 f->x_parm_birth_insn = NULL;
337 f->original_arg_vector = NULL;
338 f->original_decl_initial = NULL;
339 f->epilogue_delay_list = NULL;
342 /* Allocate fixed slots in the stack frame of the current function. */
344 /* Return size needed for stack frame based on slots so far allocated in
345 function F.
346 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
347 the caller may have to do that. */
349 static HOST_WIDE_INT
350 get_func_frame_size (struct function *f)
352 if (FRAME_GROWS_DOWNWARD)
353 return -f->x_frame_offset;
354 else
355 return f->x_frame_offset;
358 /* Return size needed for stack frame based on slots so far allocated.
359 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
360 the caller may have to do that. */
361 HOST_WIDE_INT
362 get_frame_size (void)
364 return get_func_frame_size (cfun);
367 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
368 with machine mode MODE.
370 ALIGN controls the amount of alignment for the address of the slot:
371 0 means according to MODE,
372 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
373 -2 means use BITS_PER_UNIT,
374 positive specifies alignment boundary in bits.
376 We do not round to stack_boundary here.
378 FUNCTION specifies the function to allocate in. */
380 static rtx
381 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
382 struct function *function)
384 rtx x, addr;
385 int bigend_correction = 0;
386 unsigned int alignment;
387 int frame_off, frame_alignment, frame_phase;
389 if (align == 0)
391 tree type;
393 if (mode == BLKmode)
394 alignment = BIGGEST_ALIGNMENT;
395 else
396 alignment = GET_MODE_ALIGNMENT (mode);
398 /* Allow the target to (possibly) increase the alignment of this
399 stack slot. */
400 type = lang_hooks.types.type_for_mode (mode, 0);
401 if (type)
402 alignment = LOCAL_ALIGNMENT (type, alignment);
404 alignment /= BITS_PER_UNIT;
406 else if (align == -1)
408 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
409 size = CEIL_ROUND (size, alignment);
411 else if (align == -2)
412 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
413 else
414 alignment = align / BITS_PER_UNIT;
416 if (FRAME_GROWS_DOWNWARD)
417 function->x_frame_offset -= size;
419 /* Ignore alignment requests we cannot honor given the preferred stack boundary. */
420 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
421 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
423 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
424 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
426 /* Calculate how many bytes the start of local variables is off from
427 stack alignment. */
428 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
429 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
430 frame_phase = frame_off ? frame_alignment - frame_off : 0;
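/* Illustrative example (editorial sketch, not part of the original source):
   with PREFERRED_STACK_BOUNDARY == 128 and STARTING_FRAME_OFFSET == 8,
   frame_alignment is 16, frame_off is 8 and frame_phase is 8.  The rounding
   below then makes a 16-byte-aligned slot's offset congruent to 8 mod 16,
   so that offset + STARTING_FRAME_OFFSET is a multiple of 16.  */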
432 /* Round the frame offset to the specified alignment. The default is
433 to always honor requests to align the stack but a port may choose to
434 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
435 if (STACK_ALIGNMENT_NEEDED
436 || mode != BLKmode
437 || size != 0)
439 /* We must be careful here, since FRAME_OFFSET might be negative and
440 division with a negative dividend isn't as well defined as we might
441 like. So we instead assume that ALIGNMENT is a power of two and
442 use logical operations which are unambiguous. */
443 if (FRAME_GROWS_DOWNWARD)
444 function->x_frame_offset
445 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
446 (unsigned HOST_WIDE_INT) alignment)
447 + frame_phase);
448 else
449 function->x_frame_offset
450 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
451 (unsigned HOST_WIDE_INT) alignment)
452 + frame_phase);
455 /* On a big-endian machine, if we are allocating more space than we will use,
456 use the least significant bytes of those that are allocated. */
457 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
458 bigend_correction = size - GET_MODE_SIZE (mode);
460 /* If we have already instantiated virtual registers, return the actual
461 address relative to the frame pointer. */
462 if (function == cfun && virtuals_instantiated)
463 addr = plus_constant (frame_pointer_rtx,
464 trunc_int_for_mode
465 (frame_offset + bigend_correction
466 + STARTING_FRAME_OFFSET, Pmode));
467 else
468 addr = plus_constant (virtual_stack_vars_rtx,
469 trunc_int_for_mode
470 (function->x_frame_offset + bigend_correction,
471 Pmode));
473 if (!FRAME_GROWS_DOWNWARD)
474 function->x_frame_offset += size;
476 x = gen_rtx_MEM (mode, addr);
477 MEM_NOTRAP_P (x) = 1;
479 function->x_stack_slot_list
480 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
482 /* Try to detect frame size overflows on native platforms. */
483 #if BITS_PER_WORD >= 32
484 if ((FRAME_GROWS_DOWNWARD
485 ? (unsigned HOST_WIDE_INT) -function->x_frame_offset
486 : (unsigned HOST_WIDE_INT) function->x_frame_offset)
487 > ((unsigned HOST_WIDE_INT) 1 << (BITS_PER_WORD - 1))
488 /* Leave room for the fixed part of the frame. */
489 - 64 * UNITS_PER_WORD)
491 error ("%Jtotal size of local objects too large", function->decl);
492 /* Avoid duplicate error messages as much as possible. */
493 function->x_frame_offset = 0;
495 #endif
497 return x;
500 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
501 current function. */
503 rtx
504 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
506 return assign_stack_local_1 (mode, size, align, cfun);
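/* Illustrative usage (editorial sketch, not part of the original source):
   a caller needing a word-sized spill slot with the default alignment for
   its mode would write

       rtx slot = assign_stack_local (word_mode, UNITS_PER_WORD, 0);

   while passing ALIGN == -1 instead would use BIGGEST_ALIGNMENT and round
   SIZE up to a multiple of it, as documented above assign_stack_local_1.  */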
510 /* Removes temporary slot TEMP from LIST. */
512 static void
513 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
515 if (temp->next)
516 temp->next->prev = temp->prev;
517 if (temp->prev)
518 temp->prev->next = temp->next;
519 else
520 *list = temp->next;
522 temp->prev = temp->next = NULL;
525 /* Inserts temporary slot TEMP into LIST. */
527 static void
528 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
530 temp->next = *list;
531 if (*list)
532 (*list)->prev = temp;
533 temp->prev = NULL;
534 *list = temp;
537 /* Returns the list of used temp slots at LEVEL. */
539 static struct temp_slot **
540 temp_slots_at_level (int level)
543 if (!used_temp_slots)
544 VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
546 while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
547 VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
549 return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
552 /* Returns the maximal temporary slot level. */
554 static int
555 max_slot_level (void)
557 if (!used_temp_slots)
558 return -1;
560 return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
563 /* Moves temporary slot TEMP to LEVEL. */
565 static void
566 move_slot_to_level (struct temp_slot *temp, int level)
568 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
569 insert_slot_to_list (temp, temp_slots_at_level (level));
570 temp->level = level;
573 /* Make temporary slot TEMP available. */
575 static void
576 make_slot_available (struct temp_slot *temp)
578 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
579 insert_slot_to_list (temp, &avail_temp_slots);
580 temp->in_use = 0;
581 temp->level = -1;
584 /* Allocate a temporary stack slot and record it for possible later
585 reuse.
587 MODE is the machine mode to be given to the returned rtx.
589 SIZE is the size in units of the space required. We do no rounding here
590 since assign_stack_local will do any required rounding.
592 KEEP is 1 if this slot is to be retained after a call to
593 free_temp_slots. Automatic variables for a block are allocated
594 with this flag. KEEP values of 2 or 3 were needed respectively
595 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
596 or for SAVE_EXPRs, but they are now unused.
598 TYPE is the type that will be used for the stack slot. */
600 rtx
601 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
602 int keep, tree type)
604 unsigned int align;
605 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
606 rtx slot;
608 /* If SIZE is -1 it means that somebody tried to allocate a temporary
609 of a variable size. */
610 gcc_assert (size != -1);
612 /* These are now unused. */
613 gcc_assert (keep <= 1);
615 if (mode == BLKmode)
616 align = BIGGEST_ALIGNMENT;
617 else
618 align = GET_MODE_ALIGNMENT (mode);
620 if (! type)
621 type = lang_hooks.types.type_for_mode (mode, 0);
623 if (type)
624 align = LOCAL_ALIGNMENT (type, align);
626 /* Try to find an available, already-allocated temporary of the proper
627 mode which meets the size and alignment requirements. Choose the
628 smallest one with the closest alignment.
630 If assign_stack_temp is called outside of the tree->rtl expansion,
631 we cannot reuse the stack slots (that may still refer to
632 VIRTUAL_STACK_VARS_REGNUM). */
633 if (!virtuals_instantiated)
635 for (p = avail_temp_slots; p; p = p->next)
637 if (p->align >= align && p->size >= size
638 && GET_MODE (p->slot) == mode
639 && objects_must_conflict_p (p->type, type)
640 && (best_p == 0 || best_p->size > p->size
641 || (best_p->size == p->size && best_p->align > p->align)))
643 if (p->align == align && p->size == size)
645 selected = p;
646 cut_slot_from_list (selected, &avail_temp_slots);
647 best_p = 0;
648 break;
650 best_p = p;
655 /* Make our best, if any, the one to use. */
656 if (best_p)
658 selected = best_p;
659 cut_slot_from_list (selected, &avail_temp_slots);
661 /* If there are enough aligned bytes left over, make them into a new
662 temp_slot so that the extra bytes don't get wasted. Do this only
663 for BLKmode slots, so that we can be sure of the alignment. */
664 if (GET_MODE (best_p->slot) == BLKmode)
666 int alignment = best_p->align / BITS_PER_UNIT;
667 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
669 if (best_p->size - rounded_size >= alignment)
671 p = ggc_alloc (sizeof (struct temp_slot));
672 p->in_use = p->addr_taken = 0;
673 p->size = best_p->size - rounded_size;
674 p->base_offset = best_p->base_offset + rounded_size;
675 p->full_size = best_p->full_size - rounded_size;
676 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
677 p->align = best_p->align;
678 p->address = 0;
679 p->type = best_p->type;
680 insert_slot_to_list (p, &avail_temp_slots);
682 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
683 stack_slot_list);
685 best_p->size = rounded_size;
686 best_p->full_size = rounded_size;
691 /* If we still didn't find one, make a new temporary. */
692 if (selected == 0)
694 HOST_WIDE_INT frame_offset_old = frame_offset;
696 p = ggc_alloc (sizeof (struct temp_slot));
698 /* We are passing an explicit alignment request to assign_stack_local.
699 One side effect of that is assign_stack_local will not round SIZE
700 to ensure the frame offset remains suitably aligned.
702 So for requests which depended on the rounding of SIZE, we go ahead
703 and round it now. We also make sure ALIGNMENT is at least
704 BIGGEST_ALIGNMENT. */
705 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
706 p->slot = assign_stack_local (mode,
707 (mode == BLKmode
708 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
709 : size),
710 align);
712 p->align = align;
714 /* The following slot size computation is necessary because we don't
715 know the actual size of the temporary slot until assign_stack_local
716 has performed all the frame alignment and size rounding for the
717 requested temporary. Note that extra space added for alignment
718 can be either above or below this stack slot depending on which
719 way the frame grows. We include the extra space if and only if it
720 is above this slot. */
721 if (FRAME_GROWS_DOWNWARD)
722 p->size = frame_offset_old - frame_offset;
723 else
724 p->size = size;
726 /* Now define the fields used by combine_temp_slots. */
727 if (FRAME_GROWS_DOWNWARD)
729 p->base_offset = frame_offset;
730 p->full_size = frame_offset_old - frame_offset;
732 else
734 p->base_offset = frame_offset_old;
735 p->full_size = frame_offset - frame_offset_old;
737 p->address = 0;
739 selected = p;
742 p = selected;
743 p->in_use = 1;
744 p->addr_taken = 0;
745 p->type = type;
746 p->level = temp_slot_level;
747 p->keep = keep;
749 pp = temp_slots_at_level (p->level);
750 insert_slot_to_list (p, pp);
752 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
753 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
754 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
756 /* If we know the alias set for the memory that will be used, use
757 it. If there's no TYPE, then we don't know anything about the
758 alias set for the memory. */
759 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
760 set_mem_align (slot, align);
762 /* If a type is specified, set the relevant flags. */
763 if (type != 0)
765 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
766 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
768 MEM_NOTRAP_P (slot) = 1;
770 return slot;
773 /* Allocate a temporary stack slot and record it for possible later
774 reuse. First three arguments are same as in preceding function. */
776 rtx
777 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
779 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
782 /* Assign a temporary.
783 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
784 which should be named in error messages.  In either case, we allocate
785 an object of the given type.
786 KEEP is as for assign_stack_temp.
787 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
788 it is 0 if a register is OK.
789 DONT_PROMOTE is 1 if we should not promote values in register
790 to wider modes. */
792 rtx
793 assign_temp (tree type_or_decl, int keep, int memory_required,
794 int dont_promote ATTRIBUTE_UNUSED)
796 tree type, decl;
797 enum machine_mode mode;
798 #ifdef PROMOTE_MODE
799 int unsignedp;
800 #endif
802 if (DECL_P (type_or_decl))
803 decl = type_or_decl, type = TREE_TYPE (decl);
804 else
805 decl = NULL, type = type_or_decl;
807 mode = TYPE_MODE (type);
808 #ifdef PROMOTE_MODE
809 unsignedp = TYPE_UNSIGNED (type);
810 #endif
812 if (mode == BLKmode || memory_required)
814 HOST_WIDE_INT size = int_size_in_bytes (type);
815 tree size_tree;
816 rtx tmp;
818 /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
819 problems with allocating the stack space. */
820 if (size == 0)
821 size = 1;
823 /* Unfortunately, we don't yet know how to allocate variable-sized
824 temporaries. However, sometimes we have a fixed upper limit on
825 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
826 instead. This is the case for Chill variable-sized strings. */
827 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
828 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
829 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
830 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
832 /* If we still haven't been able to get a size, see if the language
833 can compute a maximum size. */
834 if (size == -1
835 && (size_tree = lang_hooks.types.max_size (type)) != 0
836 && host_integerp (size_tree, 1))
837 size = tree_low_cst (size_tree, 1);
839 /* The size of the temporary may be too large to fit into an integer. */
840 /* ??? Not sure this should happen except for user silliness, so limit
841 this to things that aren't compiler-generated temporaries. The
842 rest of the time we'll die in assign_stack_temp_for_type. */
843 if (decl && size == -1
844 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
846 error ("size of variable %q+D is too large", decl);
847 size = 1;
850 tmp = assign_stack_temp_for_type (mode, size, keep, type);
851 return tmp;
854 #ifdef PROMOTE_MODE
855 if (! dont_promote)
856 mode = promote_mode (type, mode, &unsignedp, 0);
857 #endif
859 return gen_reg_rtx (mode);
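/* Illustrative usage (editorial sketch, not part of the original source;
   EXP here is a hypothetical expression tree): a caller that must have an
   addressable slot for a value might write

       rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   which allocates a stack temporary because MEMORY_REQUIRED is 1; with
   MEMORY_REQUIRED == 0 and a scalar type it simply returns a new pseudo.  */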
862 /* Combine temporary stack slots which are adjacent on the stack.
864 This allows for better use of already allocated stack space. This is only
865 done for BLKmode slots because we can be sure that we won't have alignment
866 problems in this case. */
868 static void
869 combine_temp_slots (void)
871 struct temp_slot *p, *q, *next, *next_q;
872 int num_slots;
874 /* We can't combine slots, because the information about which slot
875 is in which alias set will be lost. */
876 if (flag_strict_aliasing)
877 return;
879 /* If there are a lot of temp slots, don't do anything unless
880 expensive optimizations are enabled. */
881 if (! flag_expensive_optimizations)
882 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
883 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
884 return;
886 for (p = avail_temp_slots; p; p = next)
888 int delete_p = 0;
890 next = p->next;
892 if (GET_MODE (p->slot) != BLKmode)
893 continue;
895 for (q = p->next; q; q = next_q)
897 int delete_q = 0;
899 next_q = q->next;
901 if (GET_MODE (q->slot) != BLKmode)
902 continue;
904 if (p->base_offset + p->full_size == q->base_offset)
906 /* Q comes after P; combine Q into P. */
907 p->size += q->size;
908 p->full_size += q->full_size;
909 delete_q = 1;
911 else if (q->base_offset + q->full_size == p->base_offset)
913 /* P comes after Q; combine P into Q. */
914 q->size += p->size;
915 q->full_size += p->full_size;
916 delete_p = 1;
917 break;
919 if (delete_q)
920 cut_slot_from_list (q, &avail_temp_slots);
923 /* Either delete P or advance past it. */
924 if (delete_p)
925 cut_slot_from_list (p, &avail_temp_slots);
929 /* Find the temp slot corresponding to the object at address X. */
931 static struct temp_slot *
932 find_temp_slot_from_address (rtx x)
934 struct temp_slot *p;
935 rtx next;
936 int i;
938 for (i = max_slot_level (); i >= 0; i--)
939 for (p = *temp_slots_at_level (i); p; p = p->next)
941 if (XEXP (p->slot, 0) == x
942 || p->address == x
943 || (GET_CODE (x) == PLUS
944 && XEXP (x, 0) == virtual_stack_vars_rtx
945 && GET_CODE (XEXP (x, 1)) == CONST_INT
946 && INTVAL (XEXP (x, 1)) >= p->base_offset
947 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
948 return p;
950 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
951 for (next = p->address; next; next = XEXP (next, 1))
952 if (XEXP (next, 0) == x)
953 return p;
956 /* If we have a sum involving a register, see if it points to a temp
957 slot. */
958 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
959 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
960 return p;
961 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
962 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
963 return p;
965 return 0;
968 /* Indicate that NEW is an alternate way of referring to the temp slot
969 that previously was known by OLD. */
971 void
972 update_temp_slot_address (rtx old, rtx new)
974 struct temp_slot *p;
976 if (rtx_equal_p (old, new))
977 return;
979 p = find_temp_slot_from_address (old);
981 /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
982 is a register, see if one operand of the PLUS is a temporary
983 location.  If so, NEW points into it.  Otherwise, if both OLD and
984 NEW are a PLUS and there is a register in common between them,
985 try a recursive call on those values. */
986 if (p == 0)
988 if (GET_CODE (old) != PLUS)
989 return;
991 if (REG_P (new))
993 update_temp_slot_address (XEXP (old, 0), new);
994 update_temp_slot_address (XEXP (old, 1), new);
995 return;
997 else if (GET_CODE (new) != PLUS)
998 return;
1000 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1001 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1002 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1003 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1004 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1005 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1006 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1007 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1009 return;
1012 /* Otherwise add an alias for the temp's address. */
1013 else if (p->address == 0)
1014 p->address = new;
1015 else
1017 if (GET_CODE (p->address) != EXPR_LIST)
1018 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1020 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1024 /* If X could be a reference to a temporary slot, mark the fact that its
1025 address was taken. */
1027 void
1028 mark_temp_addr_taken (rtx x)
1030 struct temp_slot *p;
1032 if (x == 0)
1033 return;
1035 /* If X is not in memory or is at a constant address, it cannot be in
1036 a temporary slot. */
1037 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1038 return;
1040 p = find_temp_slot_from_address (XEXP (x, 0));
1041 if (p != 0)
1042 p->addr_taken = 1;
1045 /* If X could be a reference to a temporary slot, mark that slot as
1046 belonging to the level one higher than the current level.  If X
1047 matched one of our slots, just mark that one.  Otherwise, we can't
1048 easily predict which it is, so upgrade all of them.  Kept slots
1049 need not be touched.
1051 This is called when an ({...}) construct occurs and a statement
1052 returns a value in memory. */
1054 void
1055 preserve_temp_slots (rtx x)
1057 struct temp_slot *p = 0, *next;
1059 /* If there is no result, we still might have some objects whose address
1060 were taken, so we need to make sure they stay around. */
1061 if (x == 0)
1063 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1065 next = p->next;
1067 if (p->addr_taken)
1068 move_slot_to_level (p, temp_slot_level - 1);
1071 return;
1074 /* If X is a register that is being used as a pointer, see if we have
1075 a temporary slot we know it points to. To be consistent with
1076 the code below, we really should preserve all non-kept slots
1077 if we can't find a match, but that seems to be much too costly. */
1078 if (REG_P (x) && REG_POINTER (x))
1079 p = find_temp_slot_from_address (x);
1081 /* If X is not in memory or is at a constant address, it cannot be in
1082 a temporary slot, but it can contain something whose address was
1083 taken. */
1084 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1086 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1088 next = p->next;
1090 if (p->addr_taken)
1091 move_slot_to_level (p, temp_slot_level - 1);
1094 return;
1097 /* First see if we can find a match. */
1098 if (p == 0)
1099 p = find_temp_slot_from_address (XEXP (x, 0));
1101 if (p != 0)
1103 /* Move everything at our level whose address was taken to our new
1104 level in case we used its address. */
1105 struct temp_slot *q;
1107 if (p->level == temp_slot_level)
1109 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1111 next = q->next;
1113 if (p != q && q->addr_taken)
1114 move_slot_to_level (q, temp_slot_level - 1);
1117 move_slot_to_level (p, temp_slot_level - 1);
1118 p->addr_taken = 0;
1120 return;
1123 /* Otherwise, preserve all non-kept slots at this level. */
1124 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1126 next = p->next;
1128 if (!p->keep)
1129 move_slot_to_level (p, temp_slot_level - 1);
1133 /* Free all temporaries used so far. This is normally called at the
1134 end of generating code for a statement. */
1136 void
1137 free_temp_slots (void)
1139 struct temp_slot *p, *next;
1141 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1143 next = p->next;
1145 if (!p->keep)
1146 make_slot_available (p);
1149 combine_temp_slots ();
1152 /* Push deeper into the nesting level for stack temporaries. */
1154 void
1155 push_temp_slots (void)
1157 temp_slot_level++;
1160 /* Pop a temporary nesting level. All slots in use in the current level
1161 are freed. */
1163 void
1164 pop_temp_slots (void)
1166 struct temp_slot *p, *next;
1168 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1170 next = p->next;
1171 make_slot_available (p);
1174 combine_temp_slots ();
1176 temp_slot_level--;
1179 /* Initialize temporary slots. */
1181 void
1182 init_temp_slots (void)
1184 /* We have not allocated any temporaries yet. */
1185 avail_temp_slots = 0;
1186 used_temp_slots = 0;
1187 temp_slot_level = 0;
1190 /* These routines are responsible for converting virtual register references
1191 to the actual hard register references once RTL generation is complete.
1193 The following four variables are used for communication between the
1194 routines. They contain the offsets of the virtual registers from their
1195 respective hard registers. */
1197 static int in_arg_offset;
1198 static int var_offset;
1199 static int dynamic_offset;
1200 static int out_arg_offset;
1201 static int cfa_offset;
1203 /* In most machines, the stack pointer register is equivalent to the bottom
1204 of the stack. */
1206 #ifndef STACK_POINTER_OFFSET
1207 #define STACK_POINTER_OFFSET 0
1208 #endif
1210 /* If not defined, pick an appropriate default for the offset of dynamically
1211 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1212 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1214 #ifndef STACK_DYNAMIC_OFFSET
1216 /* The bottom of the stack points to the actual arguments. If
1217 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1218 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1219 stack space for register parameters is not pushed by the caller, but
1220 rather part of the fixed stack areas and hence not included in
1221 `current_function_outgoing_args_size'. Nevertheless, we must allow
1222 for it when allocating stack dynamic objects. */
1224 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1225 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1226 ((ACCUMULATE_OUTGOING_ARGS \
1227 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1228 + (STACK_POINTER_OFFSET)) \
1230 #else
1231 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1232 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1233 + (STACK_POINTER_OFFSET))
1234 #endif
1235 #endif
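/* Illustrative example (editorial sketch, not part of the original source):
   on a target that accumulates outgoing arguments, has no
   REG_PARM_STACK_SPACE, reserves 32 bytes of outgoing argument space and
   has STACK_POINTER_OFFSET == 0, STACK_DYNAMIC_OFFSET evaluates to 32, so
   virtual_stack_dynamic_rtx is instantiated below as
   (plus stack_pointer_rtx (const_int 32)).  */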
1238 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1239 is a virtual register, return the equivalent hard register and set the
1240 offset indirectly through the pointer. Otherwise, return 0. */
1242 static rtx
1243 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1245 rtx new;
1246 HOST_WIDE_INT offset;
1248 if (x == virtual_incoming_args_rtx)
1249 new = arg_pointer_rtx, offset = in_arg_offset;
1250 else if (x == virtual_stack_vars_rtx)
1251 new = frame_pointer_rtx, offset = var_offset;
1252 else if (x == virtual_stack_dynamic_rtx)
1253 new = stack_pointer_rtx, offset = dynamic_offset;
1254 else if (x == virtual_outgoing_args_rtx)
1255 new = stack_pointer_rtx, offset = out_arg_offset;
1256 else if (x == virtual_cfa_rtx)
1258 #ifdef FRAME_POINTER_CFA_OFFSET
1259 new = frame_pointer_rtx;
1260 #else
1261 new = arg_pointer_rtx;
1262 #endif
1263 offset = cfa_offset;
1265 else
1266 return NULL_RTX;
1268 *poffset = offset;
1269 return new;
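/* Illustrative example (editorial sketch, not part of the original source):
   a stack slot allocated at frame offset 16 is referenced before
   instantiation as (plus virtual_stack_vars_rtx (const_int 16)); since
   var_offset is STARTING_FRAME_OFFSET, the code below rewrites this as
   (plus frame_pointer_rtx (const_int (STARTING_FRAME_OFFSET + 16))).  */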
1272 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1273 Instantiate any virtual registers present inside of *LOC. The expression
1274 is simplified, as much as possible, but is not to be considered "valid"
1275 in any sense implied by the target. If any change is made, set CHANGED
1276 to true. */
1278 static int
1279 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1281 HOST_WIDE_INT offset;
1282 bool *changed = (bool *) data;
1283 rtx x, new;
1285 x = *loc;
1286 if (x == 0)
1287 return 0;
1289 switch (GET_CODE (x))
1291 case REG:
1292 new = instantiate_new_reg (x, &offset);
1293 if (new)
1295 *loc = plus_constant (new, offset);
1296 if (changed)
1297 *changed = true;
1299 return -1;
1301 case PLUS:
1302 new = instantiate_new_reg (XEXP (x, 0), &offset);
1303 if (new)
1305 new = plus_constant (new, offset);
1306 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1307 if (changed)
1308 *changed = true;
1309 return -1;
1312 /* FIXME -- from old code */
1313 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1314 we can commute the PLUS and SUBREG because pointers into the
1315 frame are well-behaved. */
1316 break;
1318 default:
1319 break;
1322 return 0;
1325 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1326 matches the predicate for insn CODE operand OPERAND. */
1328 static int
1329 safe_insn_predicate (int code, int operand, rtx x)
1331 const struct insn_operand_data *op_data;
1333 if (code < 0)
1334 return true;
1336 op_data = &insn_data[code].operand[operand];
1337 if (op_data->predicate == NULL)
1338 return true;
1340 return op_data->predicate (x, op_data->mode);
1343 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1344 registers present inside of insn. The result will be a valid insn. */
1346 static void
1347 instantiate_virtual_regs_in_insn (rtx insn)
1349 HOST_WIDE_INT offset;
1350 int insn_code, i;
1351 bool any_change = false;
1352 rtx set, new, x, seq;
1354 /* There are some special cases to be handled first. */
1355 set = single_set (insn);
1356 if (set)
1358 /* We're allowed to assign to a virtual register. This is interpreted
1359 to mean that the underlying register gets assigned the inverse
1360 transformation. This is used, for example, in the handling of
1361 non-local gotos. */
1362 new = instantiate_new_reg (SET_DEST (set), &offset);
1363 if (new)
1365 start_sequence ();
1367 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1368 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1369 GEN_INT (-offset));
1370 x = force_operand (x, new);
1371 if (x != new)
1372 emit_move_insn (new, x);
1374 seq = get_insns ();
1375 end_sequence ();
1377 emit_insn_before (seq, insn);
1378 delete_insn (insn);
1379 return;
1382 /* Handle a straight copy from a virtual register by generating a
1383 new add insn. The difference between this and falling through
1384 to the generic case is avoiding a new pseudo and eliminating a
1385 move insn in the initial rtl stream. */
1386 new = instantiate_new_reg (SET_SRC (set), &offset);
1387 if (new && offset != 0
1388 && REG_P (SET_DEST (set))
1389 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1391 start_sequence ();
1393 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1394 new, GEN_INT (offset), SET_DEST (set),
1395 1, OPTAB_LIB_WIDEN);
1396 if (x != SET_DEST (set))
1397 emit_move_insn (SET_DEST (set), x);
1399 seq = get_insns ();
1400 end_sequence ();
1402 emit_insn_before (seq, insn);
1403 delete_insn (insn);
1404 return;
1407 extract_insn (insn);
1408 insn_code = INSN_CODE (insn);
1410 /* Handle a plus involving a virtual register by determining if the
1411 operands remain valid if they're modified in place. */
1412 if (GET_CODE (SET_SRC (set)) == PLUS
1413 && recog_data.n_operands >= 3
1414 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1415 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1416 && GET_CODE (recog_data.operand[2]) == CONST_INT
1417 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1419 offset += INTVAL (recog_data.operand[2]);
1421 /* If the sum is zero, then replace with a plain move. */
1422 if (offset == 0
1423 && REG_P (SET_DEST (set))
1424 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1426 start_sequence ();
1427 emit_move_insn (SET_DEST (set), new);
1428 seq = get_insns ();
1429 end_sequence ();
1431 emit_insn_before (seq, insn);
1432 delete_insn (insn);
1433 return;
1436 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1438 /* Using validate_change and apply_change_group here leaves
1439 recog_data in an invalid state. Since we know exactly what
1440 we want to check, do those two by hand. */
1441 if (safe_insn_predicate (insn_code, 1, new)
1442 && safe_insn_predicate (insn_code, 2, x))
1444 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1445 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1446 any_change = true;
1448 /* Fall through into the regular operand fixup loop in
1449 order to take care of operands other than 1 and 2. */
1453 else
1455 extract_insn (insn);
1456 insn_code = INSN_CODE (insn);
1459 /* In the general case, we expect virtual registers to appear only in
1460 operands, and then only as either bare registers or inside memories. */
1461 for (i = 0; i < recog_data.n_operands; ++i)
1463 x = recog_data.operand[i];
1464 switch (GET_CODE (x))
1466 case MEM:
1468 rtx addr = XEXP (x, 0);
1469 bool changed = false;
1471 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1472 if (!changed)
1473 continue;
1475 start_sequence ();
1476 x = replace_equiv_address (x, addr);
1477 seq = get_insns ();
1478 end_sequence ();
1479 if (seq)
1480 emit_insn_before (seq, insn);
1482 break;
1484 case REG:
1485 new = instantiate_new_reg (x, &offset);
1486 if (new == NULL)
1487 continue;
1488 if (offset == 0)
1489 x = new;
1490 else
1492 start_sequence ();
1494 /* Careful, special mode predicates may have stuff in
1495 insn_data[insn_code].operand[i].mode that isn't useful
1496 to us for computing a new value. */
1497 /* ??? Recognize address_operand and/or "p" constraints
1498 to see if (plus new offset) is a valid address before we put
1499 this through expand_simple_binop. */
1500 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1501 GEN_INT (offset), NULL_RTX,
1502 1, OPTAB_LIB_WIDEN);
1503 seq = get_insns ();
1504 end_sequence ();
1505 emit_insn_before (seq, insn);
1507 break;
1509 case SUBREG:
1510 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1511 if (new == NULL)
1512 continue;
1513 if (offset != 0)
1515 start_sequence ();
1516 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1517 GEN_INT (offset), NULL_RTX,
1518 1, OPTAB_LIB_WIDEN);
1519 seq = get_insns ();
1520 end_sequence ();
1521 emit_insn_before (seq, insn);
1523 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1524 GET_MODE (new), SUBREG_BYTE (x));
1525 break;
1527 default:
1528 continue;
1531 /* At this point, X contains the new value for the operand.
1532 Validate the new value vs the insn predicate. Note that
1533 asm insns will have insn_code -1 here. */
1534 if (!safe_insn_predicate (insn_code, i, x))
1535 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1537 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1538 any_change = true;
1541 if (any_change)
1543 /* Propagate operand changes into the duplicates. */
1544 for (i = 0; i < recog_data.n_dups; ++i)
1545 *recog_data.dup_loc[i]
1546 = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1548 /* Force re-recognition of the instruction for validation. */
1549 INSN_CODE (insn) = -1;
1552 if (asm_noperands (PATTERN (insn)) >= 0)
1554 if (!check_asm_operands (PATTERN (insn)))
1556 error_for_asm (insn, "impossible constraint in %<asm%>");
1557 delete_insn (insn);
1560 else
1562 if (recog_memoized (insn) < 0)
1563 fatal_insn_not_found (insn);
1567 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1568 do any instantiation required. */
1570 static void
1571 instantiate_decl (rtx x)
1573 rtx addr;
1575 if (x == 0)
1576 return;
1578 /* If this is a CONCAT, recurse for the pieces. */
1579 if (GET_CODE (x) == CONCAT)
1581 instantiate_decl (XEXP (x, 0));
1582 instantiate_decl (XEXP (x, 1));
1583 return;
1586 /* If this is not a MEM, no need to do anything. Similarly if the
1587 address is a constant or a register that is not a virtual register. */
1588 if (!MEM_P (x))
1589 return;
1591 addr = XEXP (x, 0);
1592 if (CONSTANT_P (addr)
1593 || (REG_P (addr)
1594 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1595 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1596 return;
1598 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1601 /* Helper for instantiate_decls called via walk_tree: Process all decls
1602 in the given DECL_VALUE_EXPR. */
1604 static tree
1605 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1607 tree t = *tp;
1608 if (! EXPR_P (t))
1610 *walk_subtrees = 0;
1611 if (DECL_P (t) && DECL_RTL_SET_P (t))
1612 instantiate_decl (DECL_RTL (t));
1614 return NULL;
1617 /* Subroutine of instantiate_decls: Process all decls in the given
1618 BLOCK node and all its subblocks. */
1620 static void
1621 instantiate_decls_1 (tree let)
1623 tree t;
1625 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1627 if (DECL_RTL_SET_P (t))
1628 instantiate_decl (DECL_RTL (t));
1629 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1631 tree v = DECL_VALUE_EXPR (t);
1632 walk_tree (&v, instantiate_expr, NULL, NULL);
1636 /* Process all subblocks. */
1637 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1638 instantiate_decls_1 (t);
1641 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1642 all virtual registers in their DECL_RTL's. */
1644 static void
1645 instantiate_decls (tree fndecl)
1647 tree decl;
1649 /* Process all parameters of the function. */
1650 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1652 instantiate_decl (DECL_RTL (decl));
1653 instantiate_decl (DECL_INCOMING_RTL (decl));
1654 if (DECL_HAS_VALUE_EXPR_P (decl))
1656 tree v = DECL_VALUE_EXPR (decl);
1657 walk_tree (&v, instantiate_expr, NULL, NULL);
1661 /* Now process all variables defined in the function or its subblocks. */
1662 instantiate_decls_1 (DECL_INITIAL (fndecl));
1665 /* Pass through the INSNS of function FNDECL and convert virtual register
1666 references to hard register references. */
1668 static void
1669 instantiate_virtual_regs (void)
1671 rtx insn;
1673 /* Compute the offsets to use for this function. */
1674 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1675 var_offset = STARTING_FRAME_OFFSET;
1676 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1677 out_arg_offset = STACK_POINTER_OFFSET;
1678 #ifdef FRAME_POINTER_CFA_OFFSET
1679 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1680 #else
1681 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1682 #endif
1684 /* Initialize recognition, indicating that volatile is OK. */
1685 init_recog ();
1687 /* Scan through all the insns, instantiating every virtual register still
1688 present. */
1689 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1690 if (INSN_P (insn))
1692 /* These patterns in the instruction stream can never be recognized.
1693 Fortunately, they shouldn't contain virtual registers either. */
1694 if (GET_CODE (PATTERN (insn)) == USE
1695 || GET_CODE (PATTERN (insn)) == CLOBBER
1696 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1697 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1698 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1699 continue;
1701 instantiate_virtual_regs_in_insn (insn);
1703 if (INSN_DELETED_P (insn))
1704 continue;
1706 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1708 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1709 if (GET_CODE (insn) == CALL_INSN)
1710 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1711 instantiate_virtual_regs_in_rtx, NULL);
1714 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1715 instantiate_decls (current_function_decl);
1717 /* Indicate that, from now on, assign_stack_local should use
1718 frame_pointer_rtx. */
1719 virtuals_instantiated = 1;
1722 struct tree_opt_pass pass_instantiate_virtual_regs =
1724 "vregs", /* name */
1725 NULL, /* gate */
1726 instantiate_virtual_regs, /* execute */
1727 NULL, /* sub */
1728 NULL, /* next */
1729 0, /* static_pass_number */
1730 0, /* tv_id */
1731 0, /* properties_required */
1732 0, /* properties_provided */
1733 0, /* properties_destroyed */
1734 0, /* todo_flags_start */
1735 TODO_dump_func, /* todo_flags_finish */
1736 0 /* letter */
1740 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1741 This means a type for which function calls must pass an address to the
1742 function or get an address back from the function.
1743 EXP may be a type node or an expression (whose type is tested). */
1745 int
1746 aggregate_value_p (tree exp, tree fntype)
1748 int i, regno, nregs;
1749 rtx reg;
1751 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1753 if (fntype)
1754 switch (TREE_CODE (fntype))
1756 case CALL_EXPR:
1757 fntype = get_callee_fndecl (fntype);
1758 fntype = fntype ? TREE_TYPE (fntype) : 0;
1759 break;
1760 case FUNCTION_DECL:
1761 fntype = TREE_TYPE (fntype);
1762 break;
1763 case FUNCTION_TYPE:
1764 case METHOD_TYPE:
1765 break;
1766 case IDENTIFIER_NODE:
1767 fntype = 0;
1768 break;
1769 default:
1770 /* We don't expect other tree codes here. */
1771 gcc_unreachable ();
1774 if (TREE_CODE (type) == VOID_TYPE)
1775 return 0;
1776 /* If the front end has decided that this needs to be passed by
1777 reference, do so. */
1778 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1779 && DECL_BY_REFERENCE (exp))
1780 return 1;
1781 if (targetm.calls.return_in_memory (type, fntype))
1782 return 1;
1783 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1784 and thus can't be returned in registers. */
1785 if (TREE_ADDRESSABLE (type))
1786 return 1;
1787 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1788 return 1;
1789 /* Make sure we have suitable call-clobbered regs to return
1790 the value in; if not, we must return it in memory. */
1791 reg = hard_function_value (type, 0, fntype, 0);
1793 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1794 it is OK. */
1795 if (!REG_P (reg))
1796 return 0;
1798 regno = REGNO (reg);
1799 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1800 for (i = 0; i < nregs; i++)
1801 if (! call_used_regs[regno + i])
1802 return 1;
1803 return 0;
1806 /* Return true if we should assign DECL a pseudo register; false if it
1807 should live on the local stack. */
1809 bool
1810 use_register_for_decl (tree decl)
1812 /* Honor volatile. */
1813 if (TREE_SIDE_EFFECTS (decl))
1814 return false;
1816 /* Honor addressability. */
1817 if (TREE_ADDRESSABLE (decl))
1818 return false;
1820 /* Only register-like things go in registers. */
1821 if (DECL_MODE (decl) == BLKmode)
1822 return false;
1824 /* If -ffloat-store specified, don't put explicit float variables
1825 into registers. */
1826 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1827 propagates values across these stores, and it probably shouldn't. */
1828 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1829 return false;
1831 /* If we're not interested in tracking debugging information for
1832 this decl, then we can certainly put it in a register. */
1833 if (DECL_IGNORED_P (decl))
1834 return true;
1836 return (optimize || DECL_REGISTER (decl));
1839 /* Return true if TYPE should be passed by invisible reference. */
1841 bool
1842 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1843 tree type, bool named_arg)
1845 if (type)
1847 /* If this type contains non-trivial constructors, then it is
1848 forbidden for the middle-end to create any new copies. */
1849 if (TREE_ADDRESSABLE (type))
1850 return true;
1852 /* GCC post 3.4 passes *all* variable sized types by reference. */
1853 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1854 return true;
1857 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1860 /* Return true if TYPE, which is passed by reference, should be callee
1861 copied instead of caller copied. */
1863 bool
1864 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1865 tree type, bool named_arg)
1867 if (type && TREE_ADDRESSABLE (type))
1868 return false;
1869 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1872 /* Structures to communicate between the subroutines of assign_parms.
1873 The first holds data persistent across all parameters, the second
1874 is cleared out for each parameter. */
1876 struct assign_parm_data_all
1878 CUMULATIVE_ARGS args_so_far;
1879 struct args_size stack_args_size;
1880 tree function_result_decl;
1881 tree orig_fnargs;
1882 rtx conversion_insns;
1883 HOST_WIDE_INT pretend_args_size;
1884 HOST_WIDE_INT extra_pretend_bytes;
1885 int reg_parm_stack_space;
1888 struct assign_parm_data_one
1890 tree nominal_type;
1891 tree passed_type;
1892 rtx entry_parm;
1893 rtx stack_parm;
1894 enum machine_mode nominal_mode;
1895 enum machine_mode passed_mode;
1896 enum machine_mode promoted_mode;
1897 struct locate_and_pad_arg_data locate;
1898 int partial;
1899 BOOL_BITFIELD named_arg : 1;
1900 BOOL_BITFIELD passed_pointer : 1;
1901 BOOL_BITFIELD on_stack : 1;
1902 BOOL_BITFIELD loaded_in_reg : 1;
1905 /* A subroutine of assign_parms. Initialize ALL. */
1907 static void
1908 assign_parms_initialize_all (struct assign_parm_data_all *all)
1910 tree fntype;
1912 memset (all, 0, sizeof (*all));
1914 fntype = TREE_TYPE (current_function_decl);
1916 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1917 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1918 #else
1919 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1920 current_function_decl, -1);
1921 #endif
1923 #ifdef REG_PARM_STACK_SPACE
1924 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1925 #endif
1928 /* If ARGS contains entries with complex types, split the entry into two
1929 entries of the component type. Return a new list if substitutions are
1930 needed, else the old list. */
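/* For instance (illustrative only): a parameter of type _Complex double,
   on a target whose split_complex_arg hook accepts that type, is rewritten
   below as two adjacent PARM_DECLs of type double -- the original decl for
   the real part and a synthetic one for the imaginary part.  */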
1932 static tree
1933 split_complex_args (tree args)
1935 tree p;
1937 /* Before allocating memory, check for the common case of no complex. */
1938 for (p = args; p; p = TREE_CHAIN (p))
1940 tree type = TREE_TYPE (p);
1941 if (TREE_CODE (type) == COMPLEX_TYPE
1942 && targetm.calls.split_complex_arg (type))
1943 goto found;
1945 return args;
1947 found:
1948 args = copy_list (args);
1950 for (p = args; p; p = TREE_CHAIN (p))
1952 tree type = TREE_TYPE (p);
1953 if (TREE_CODE (type) == COMPLEX_TYPE
1954 && targetm.calls.split_complex_arg (type))
1956 tree decl;
1957 tree subtype = TREE_TYPE (type);
1958 bool addressable = TREE_ADDRESSABLE (p);
1960 /* Rewrite the PARM_DECL's type with its component. */
1961 TREE_TYPE (p) = subtype;
1962 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1963 DECL_MODE (p) = VOIDmode;
1964 DECL_SIZE (p) = NULL;
1965 DECL_SIZE_UNIT (p) = NULL;
1966 /* If this arg must go in memory, put it in a pseudo here.
1967 We can't allow it to go in memory as per normal parms,
1968 because the usual place might not have the imag part
1969 adjacent to the real part. */
1970 DECL_ARTIFICIAL (p) = addressable;
1971 DECL_IGNORED_P (p) = addressable;
1972 TREE_ADDRESSABLE (p) = 0;
1973 layout_decl (p, 0);
1975 /* Build a second synthetic decl. */
1976 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1977 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1978 DECL_ARTIFICIAL (decl) = addressable;
1979 DECL_IGNORED_P (decl) = addressable;
1980 layout_decl (decl, 0);
1982 /* Splice it in; skip the new decl. */
1983 TREE_CHAIN (decl) = TREE_CHAIN (p);
1984 TREE_CHAIN (p) = decl;
1985 p = decl;
1989 return args;
1992 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
1993 the hidden struct return argument, and (abi willing) complex args.
1994 Return the new parameter list. */
1996 static tree
1997 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
1999 tree fndecl = current_function_decl;
2000 tree fntype = TREE_TYPE (fndecl);
2001 tree fnargs = DECL_ARGUMENTS (fndecl);
2003 /* If struct value address is treated as the first argument, make it so. */
2004 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2005 && ! current_function_returns_pcc_struct
2006 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2008 tree type = build_pointer_type (TREE_TYPE (fntype));
2009 tree decl;
2011 decl = build_decl (PARM_DECL, NULL_TREE, type);
2012 DECL_ARG_TYPE (decl) = type;
2013 DECL_ARTIFICIAL (decl) = 1;
2014 DECL_IGNORED_P (decl) = 1;
2016 TREE_CHAIN (decl) = fnargs;
2017 fnargs = decl;
2018 all->function_result_decl = decl;
2021 all->orig_fnargs = fnargs;
2023 /* If the target wants to split complex arguments into scalars, do so. */
2024 if (targetm.calls.split_complex_arg)
2025 fnargs = split_complex_args (fnargs);
2027 return fnargs;
2030 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2031 data for the parameter. Incorporate ABI specifics such as pass-by-
2032 reference and type promotion. */
2034 static void
2035 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2036 struct assign_parm_data_one *data)
2038 tree nominal_type, passed_type;
2039 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2041 memset (data, 0, sizeof (*data));
2043 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2044 if (!current_function_stdarg)
2045 data->named_arg = 1; /* No variadic parms. */
2046 else if (TREE_CHAIN (parm))
2047 data->named_arg = 1; /* Not the last non-variadic parm. */
2048 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2049 data->named_arg = 1; /* Only variadic ones are unnamed. */
2050 else
2051 data->named_arg = 0; /* Treat as variadic. */
2053 nominal_type = TREE_TYPE (parm);
2054 passed_type = DECL_ARG_TYPE (parm);
2056 /* Look out for errors propagating this far. Also, if the parameter's
2057 type is void then its value doesn't matter. */
2058 if (TREE_TYPE (parm) == error_mark_node
2059 /* This can happen after weird syntax errors
2060 or if an enum type is defined among the parms. */
2061 || TREE_CODE (parm) != PARM_DECL
2062 || passed_type == NULL
2063 || VOID_TYPE_P (nominal_type))
2065 nominal_type = passed_type = void_type_node;
2066 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2067 goto egress;
2070 /* Find mode of arg as it is passed, and mode of arg as it should be
2071 during execution of this function. */
2072 passed_mode = TYPE_MODE (passed_type);
2073 nominal_mode = TYPE_MODE (nominal_type);
2075 /* If the parm is to be passed as a transparent union, use the type of
2076 the first field for the tests below. We have already verified that
2077 the modes are the same. */
2078 if (TREE_CODE (passed_type) == UNION_TYPE
2079 && TYPE_TRANSPARENT_UNION (passed_type))
2080 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2082 /* See if this arg was passed by invisible reference. */
2083 if (pass_by_reference (&all->args_so_far, passed_mode,
2084 passed_type, data->named_arg))
2086 passed_type = nominal_type = build_pointer_type (passed_type);
2087 data->passed_pointer = true;
2088 passed_mode = nominal_mode = Pmode;
2091 /* Find mode as it is passed by the ABI. */
2092 promoted_mode = passed_mode;
2093 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2095 int unsignedp = TYPE_UNSIGNED (passed_type);
2096 promoted_mode = promote_mode (passed_type, promoted_mode,
2097 &unsignedp, 1);
2100 egress:
2101 data->nominal_type = nominal_type;
2102 data->passed_type = passed_type;
2103 data->nominal_mode = nominal_mode;
2104 data->passed_mode = passed_mode;
2105 data->promoted_mode = promoted_mode;
2108 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2110 static void
2111 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2112 struct assign_parm_data_one *data, bool no_rtl)
2114 int varargs_pretend_bytes = 0;
2116 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2117 data->promoted_mode,
2118 data->passed_type,
2119 &varargs_pretend_bytes, no_rtl);
2121 /* If the back-end has requested extra stack space, record how much is
2122 needed. Do not change pretend_args_size otherwise since it may be
2123 nonzero from an earlier partial argument. */
2124 if (varargs_pretend_bytes > 0)
2125 all->pretend_args_size = varargs_pretend_bytes;
2128 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2129 the incoming location of the current parameter. */
2131 static void
2132 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2133 struct assign_parm_data_one *data)
2135 HOST_WIDE_INT pretend_bytes = 0;
2136 rtx entry_parm;
2137 bool in_regs;
2139 if (data->promoted_mode == VOIDmode)
2141 data->entry_parm = data->stack_parm = const0_rtx;
2142 return;
2145 #ifdef FUNCTION_INCOMING_ARG
2146 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2147 data->passed_type, data->named_arg);
2148 #else
2149 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2150 data->passed_type, data->named_arg);
2151 #endif
2153 if (entry_parm == 0)
2154 data->promoted_mode = data->passed_mode;
2156 /* Determine parm's home in the stack, in case it arrives in the stack
2157 or we should pretend it did. Compute the stack position and rtx where
2158 the argument arrives and its size.
2160 There is one complexity here: If this was a parameter that would
2161 have been passed in registers, but wasn't only because it is
2162 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2163 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2164 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2165 as it was the previous time. */
2166 in_regs = entry_parm != 0;
2167 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2168 in_regs = true;
2169 #endif
2170 if (!in_regs && !data->named_arg)
2172 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2174 rtx tem;
2175 #ifdef FUNCTION_INCOMING_ARG
2176 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2177 data->passed_type, true);
2178 #else
2179 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2180 data->passed_type, true);
2181 #endif
2182 in_regs = tem != NULL;
2186 /* If this parameter was passed both in registers and in the stack, use
2187 the copy on the stack. */
2188 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2189 data->passed_type))
2190 entry_parm = 0;
2192 if (entry_parm)
2194 int partial;
2196 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2197 data->promoted_mode,
2198 data->passed_type,
2199 data->named_arg);
2200 data->partial = partial;
2202 /* The caller might already have allocated stack space for the
2203 register parameters. */
2204 if (partial != 0 && all->reg_parm_stack_space == 0)
2206 /* Part of this argument is passed in registers and part
2207 is passed on the stack. Ask the prologue code to extend
2208 the stack part so that we can recreate the full value.
2210 PRETEND_BYTES is the size of the registers we need to store.
2211 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2212 stack space that the prologue should allocate.
2214 Internally, gcc assumes that the argument pointer is aligned
2215 to STACK_BOUNDARY bits. This is used both for alignment
2216 optimizations (see init_emit) and to locate arguments that are
2217 aligned to more than PARM_BOUNDARY bits. We must preserve this
2218 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2219 a stack boundary. */
2221 /* We assume at most one partial arg, and it must be the first
2222 argument on the stack. */
2223 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2225 pretend_bytes = partial;
2226 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2228 /* We want to align relative to the actual stack pointer, so
2229 don't include this in the stack size until later. */
2230 all->extra_pretend_bytes = all->pretend_args_size;
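/* Worked example with illustrative numbers: if the first 4 bytes of an
   argument arrive in a register, PRETEND_BYTES is 4; with STACK_BYTES
   equal to 16, CEIL_ROUND (4, 16) yields 16, so 16 bytes of pretend
   argument space are requested and the argument pointer stays aligned
   to STACK_BOUNDARY.  */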
2234 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2235 entry_parm ? data->partial : 0, current_function_decl,
2236 &all->stack_args_size, &data->locate);
2238 /* Adjust offsets to include the pretend args. */
2239 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2240 data->locate.slot_offset.constant += pretend_bytes;
2241 data->locate.offset.constant += pretend_bytes;
2243 data->entry_parm = entry_parm;
2246 /* A subroutine of assign_parms. If there is actually space on the stack
2247 for this parm, count it in stack_args_size and return true. */
2249 static bool
2250 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2251 struct assign_parm_data_one *data)
2253 /* Trivially true if we've no incoming register. */
2254 if (data->entry_parm == NULL)
2256 /* Also true if we're partially in registers and partially not,
2257 since we've arranged to drop the entire argument on the stack. */
2258 else if (data->partial != 0)
2260 /* Also true if the target says that it's passed in both registers
2261 and on the stack. */
2262 else if (GET_CODE (data->entry_parm) == PARALLEL
2263 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2265 /* Also true if the target says that there's stack allocated for
2266 all register parameters. */
2267 else if (all->reg_parm_stack_space > 0)
2269 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2270 else
2271 return false;
2273 all->stack_args_size.constant += data->locate.size.constant;
2274 if (data->locate.size.var)
2275 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2277 return true;
2280 /* A subroutine of assign_parms. Given that this parameter is allocated
2281 stack space by the ABI, find it. */
2283 static void
2284 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2286 rtx offset_rtx, stack_parm;
2287 unsigned int align, boundary;
2289 /* If we're passing this arg using a reg, make its stack home the
2290 aligned stack slot. */
2291 if (data->entry_parm)
2292 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2293 else
2294 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2296 stack_parm = current_function_internal_arg_pointer;
2297 if (offset_rtx != const0_rtx)
2298 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2299 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2301 set_mem_attributes (stack_parm, parm, 1);
2303 boundary = data->locate.boundary;
2304 align = BITS_PER_UNIT;
2306 /* If we're padding upward, we know that the alignment of the slot
2307 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2308 intentionally forcing upward padding. Otherwise we have to come
2309 up with a guess at the alignment based on OFFSET_RTX. */
2310 if (data->locate.where_pad != downward || data->entry_parm)
2311 align = boundary;
2312 else if (GET_CODE (offset_rtx) == CONST_INT)
2314 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2315 align = align & -align;
2317 set_mem_align (stack_parm, align);
2319 if (data->entry_parm)
2320 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2322 data->stack_parm = stack_parm;
2325 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2326 always valid and contiguous. */
2328 static void
2329 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2331 rtx entry_parm = data->entry_parm;
2332 rtx stack_parm = data->stack_parm;
2334 /* If this parm was passed part in regs and part in memory, pretend it
2335 arrived entirely in memory by pushing the register-part onto the stack.
2336 In the special case of a DImode or DFmode that is split, we could put
2337 it together in a pseudoreg directly, but for now that's not worth
2338 bothering with. */
2339 if (data->partial != 0)
2341 /* Handle calls that pass values in multiple non-contiguous
2342 locations. The Irix 6 ABI has examples of this. */
2343 if (GET_CODE (entry_parm) == PARALLEL)
2344 emit_group_store (validize_mem (stack_parm), entry_parm,
2345 data->passed_type,
2346 int_size_in_bytes (data->passed_type));
2347 else
2349 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2350 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2351 data->partial / UNITS_PER_WORD);
2354 entry_parm = stack_parm;
2357 /* If we didn't decide this parm came in a register, by default it came
2358 on the stack. */
2359 else if (entry_parm == NULL)
2360 entry_parm = stack_parm;
2362 /* When an argument is passed in multiple locations, we can't make use
2363 of this information, but we can save some copying if the whole argument
2364 is passed in a single register. */
2365 else if (GET_CODE (entry_parm) == PARALLEL
2366 && data->nominal_mode != BLKmode
2367 && data->passed_mode != BLKmode)
2369 size_t i, len = XVECLEN (entry_parm, 0);
2371 for (i = 0; i < len; i++)
2372 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2373 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2374 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2375 == data->passed_mode)
2376 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2378 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2379 break;
2383 data->entry_parm = entry_parm;
2386 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2387 always valid and properly aligned. */
2389 static void
2390 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2392 rtx stack_parm = data->stack_parm;
2394 /* If we can't trust the parm stack slot to be aligned enough for its
2395 ultimate type, don't use that slot after entry. We'll make another
2396 stack slot, if we need one. */
2397 if (stack_parm
2398 && ((STRICT_ALIGNMENT
2399 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2400 || (data->nominal_type
2401 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2402 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2403 stack_parm = NULL;
2405 /* If parm was passed in memory, and we need to convert it on entry,
2406 don't store it back in that same slot. */
2407 else if (data->entry_parm == stack_parm
2408 && data->nominal_mode != BLKmode
2409 && data->nominal_mode != data->passed_mode)
2410 stack_parm = NULL;
2412 /* If stack protection is in effect for this function, don't leave any
2413 pointers in their passed stack slots. */
2414 else if (cfun->stack_protect_guard
2415 && (flag_stack_protect == 2
2416 || data->passed_pointer
2417 || POINTER_TYPE_P (data->nominal_type)))
2418 stack_parm = NULL;
2420 data->stack_parm = stack_parm;
2423 /* A subroutine of assign_parms. Return true if the current parameter
2424 should be stored as a BLKmode in the current frame. */
2426 static bool
2427 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2429 if (data->nominal_mode == BLKmode)
2430 return true;
2431 if (GET_CODE (data->entry_parm) == PARALLEL)
2432 return true;
2434 #ifdef BLOCK_REG_PADDING
2435 /* Only assign_parm_setup_block knows how to deal with register arguments
2436 that are padded at the least significant end. */
2437 if (REG_P (data->entry_parm)
2438 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2439 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2440 == (BYTES_BIG_ENDIAN ? upward : downward)))
2441 return true;
2442 #endif
2444 return false;
2447 /* A subroutine of assign_parms. Arrange for the parameter to be
2448 present and valid in DATA->STACK_RTL. */
2450 static void
2451 assign_parm_setup_block (struct assign_parm_data_all *all,
2452 tree parm, struct assign_parm_data_one *data)
2454 rtx entry_parm = data->entry_parm;
2455 rtx stack_parm = data->stack_parm;
2456 HOST_WIDE_INT size;
2457 HOST_WIDE_INT size_stored;
2458 rtx orig_entry_parm = entry_parm;
2460 if (GET_CODE (entry_parm) == PARALLEL)
2461 entry_parm = emit_group_move_into_temps (entry_parm);
2463 /* If we've a non-block object that's nevertheless passed in parts,
2464 reconstitute it in register operations rather than on the stack. */
2465 if (GET_CODE (entry_parm) == PARALLEL
2466 && data->nominal_mode != BLKmode)
2468 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2470 if ((XVECLEN (entry_parm, 0) > 1
2471 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2472 && use_register_for_decl (parm))
2474 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2476 push_to_sequence (all->conversion_insns);
2478 /* For values returned in multiple registers, handle possible
2479 incompatible calls to emit_group_store.
2481 For example, the following would be invalid, and would have to
2482 be fixed by the conditional below:
2484 emit_group_store ((reg:SF), (parallel:DF))
2485 emit_group_store ((reg:SI), (parallel:DI))
2487 An example of this are doubles in e500 v2:
2488 (parallel:DF (expr_list (reg:SI) (const_int 0))
2489 (expr_list (reg:SI) (const_int 4))). */
2490 if (data->nominal_mode != data->passed_mode)
2492 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2493 emit_group_store (t, entry_parm, NULL_TREE,
2494 GET_MODE_SIZE (GET_MODE (entry_parm)));
2495 convert_move (parmreg, t, 0);
2497 else
2498 emit_group_store (parmreg, entry_parm, data->nominal_type,
2499 int_size_in_bytes (data->nominal_type));
2501 all->conversion_insns = get_insns ();
2502 end_sequence ();
2504 SET_DECL_RTL (parm, parmreg);
2505 return;
2509 size = int_size_in_bytes (data->passed_type);
2510 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2511 if (stack_parm == 0)
2513 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2514 stack_parm = assign_stack_local (BLKmode, size_stored,
2515 DECL_ALIGN (parm));
2516 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2517 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2518 set_mem_attributes (stack_parm, parm, 1);
2521 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2522 calls that pass values in multiple non-contiguous locations. */
2523 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2525 rtx mem;
2527 /* Note that we will be storing an integral number of words.
2528 So we have to be careful to ensure that we allocate an
2529 integral number of words. We do this above when we call
2530 assign_stack_local if space was not allocated in the argument
2531 list. If it was, this will not work if PARM_BOUNDARY is not
2532 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2533 if it becomes a problem. Exception is when BLKmode arrives
2534 with arguments not conforming to word_mode. */
2536 if (data->stack_parm == 0)
2538 else if (GET_CODE (entry_parm) == PARALLEL)
2540 else
2541 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2543 mem = validize_mem (stack_parm);
2545 /* Handle values in multiple non-contiguous locations. */
2546 if (GET_CODE (entry_parm) == PARALLEL)
2548 push_to_sequence (all->conversion_insns);
2549 emit_group_store (mem, entry_parm, data->passed_type, size);
2550 all->conversion_insns = get_insns ();
2551 end_sequence ();
2554 else if (size == 0)
2557 /* If SIZE is that of a mode no bigger than a word, just use
2558 that mode's store operation. */
2559 else if (size <= UNITS_PER_WORD)
2561 enum machine_mode mode
2562 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2564 if (mode != BLKmode
2565 #ifdef BLOCK_REG_PADDING
2566 && (size == UNITS_PER_WORD
2567 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2568 != (BYTES_BIG_ENDIAN ? upward : downward)))
2569 #endif
2572 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2573 emit_move_insn (change_address (mem, mode, 0), reg);
2576 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2577 machine must be aligned to the left before storing
2578 to memory. Note that the previous test doesn't
2579 handle all cases (e.g. SIZE == 3). */
2580 else if (size != UNITS_PER_WORD
2581 #ifdef BLOCK_REG_PADDING
2582 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2583 == downward)
2584 #else
2585 && BYTES_BIG_ENDIAN
2586 #endif
2589 rtx tem, x;
2590 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2591 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2593 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2594 build_int_cst (NULL_TREE, by),
2595 NULL_RTX, 1);
2596 tem = change_address (mem, word_mode, 0);
2597 emit_move_insn (tem, x);
2599 else
2600 move_block_from_reg (REGNO (entry_parm), mem,
2601 size_stored / UNITS_PER_WORD);
2603 else
2604 move_block_from_reg (REGNO (entry_parm), mem,
2605 size_stored / UNITS_PER_WORD);
2607 else if (data->stack_parm == 0)
2609 push_to_sequence (all->conversion_insns);
2610 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2611 BLOCK_OP_NORMAL);
2612 all->conversion_insns = get_insns ();
2613 end_sequence ();
2616 data->stack_parm = stack_parm;
2617 SET_DECL_RTL (parm, stack_parm);
2620 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2621 parameter. Get it there. Perform all ABI specified conversions. */
2623 static void
2624 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2625 struct assign_parm_data_one *data)
2627 rtx parmreg;
2628 enum machine_mode promoted_nominal_mode;
2629 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2630 bool did_conversion = false;
2632 /* Store the parm in a pseudoregister during the function, but we may
2633 need to do it in a wider mode. */
2635 /* This is not really promoting for a call. However we need to be
2636 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2637 promoted_nominal_mode
2638 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2640 parmreg = gen_reg_rtx (promoted_nominal_mode);
2642 if (!DECL_ARTIFICIAL (parm))
2643 mark_user_reg (parmreg);
2645 /* If this was an item that we received a pointer to,
2646 set DECL_RTL appropriately. */
2647 if (data->passed_pointer)
2649 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2650 set_mem_attributes (x, parm, 1);
2651 SET_DECL_RTL (parm, x);
2653 else
2654 SET_DECL_RTL (parm, parmreg);
2656 /* Copy the value into the register. */
2657 if (data->nominal_mode != data->passed_mode
2658 || promoted_nominal_mode != data->promoted_mode)
2660 int save_tree_used;
2662 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2663 mode, by the caller. We now have to convert it to
2664 NOMINAL_MODE, if different. However, PARMREG may be in
2665 a different mode than NOMINAL_MODE if it is being stored
2666 promoted.
2668 If ENTRY_PARM is a hard register, it might be in a register
2669 not valid for operating in its mode (e.g., an odd-numbered
2670 register for a DFmode). In that case, moves are the only
2671 thing valid, so we can't do a convert from there. This
2672 occurs when the calling sequence allows such misaligned
2673 usages.
2675 In addition, the conversion may involve a call, which could
2676 clobber parameters which haven't been copied to pseudo
2677 registers yet. Therefore, we must first copy the parm to
2678 a pseudo reg here, and save the conversion until after all
2679 parameters have been moved. */
2681 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2683 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2685 push_to_sequence (all->conversion_insns);
2686 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2688 if (GET_CODE (tempreg) == SUBREG
2689 && GET_MODE (tempreg) == data->nominal_mode
2690 && REG_P (SUBREG_REG (tempreg))
2691 && data->nominal_mode == data->passed_mode
2692 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2693 && GET_MODE_SIZE (GET_MODE (tempreg))
2694 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2696 /* The argument is already sign/zero extended, so note it
2697 into the subreg. */
2698 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2699 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2702 /* TREE_USED gets set erroneously during expand_assignment. */
2703 save_tree_used = TREE_USED (parm);
2704 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2705 TREE_USED (parm) = save_tree_used;
2706 all->conversion_insns = get_insns ();
2707 end_sequence ();
2709 did_conversion = true;
2711 else
2712 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2714 /* If we were passed a pointer but the actual value can safely live
2715 in a register, put it in one. */
2716 if (data->passed_pointer
2717 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2718 /* If by-reference argument was promoted, demote it. */
2719 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2720 || use_register_for_decl (parm)))
2722 /* We can't use nominal_mode, because it will have been set to
2723 Pmode above. We must use the actual mode of the parm. */
2724 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2725 mark_user_reg (parmreg);
2727 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2729 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2730 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2732 push_to_sequence (all->conversion_insns);
2733 emit_move_insn (tempreg, DECL_RTL (parm));
2734 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2735 emit_move_insn (parmreg, tempreg);
2736 all->conversion_insns = get_insns ();
2737 end_sequence ();
2739 did_conversion = true;
2741 else
2742 emit_move_insn (parmreg, DECL_RTL (parm));
2744 SET_DECL_RTL (parm, parmreg);
2746 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2747 now the parm. */
2748 data->stack_parm = NULL;
2751 /* Mark the register as eliminable if we did no conversion and it was
2752 copied from memory at a fixed offset, and the arg pointer was not
2753 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2754 offset formed an invalid address, such memory-equivalences as we
2755 make here would screw up life analysis for it. */
2756 if (data->nominal_mode == data->passed_mode
2757 && !did_conversion
2758 && data->stack_parm != 0
2759 && MEM_P (data->stack_parm)
2760 && data->locate.offset.var == 0
2761 && reg_mentioned_p (virtual_incoming_args_rtx,
2762 XEXP (data->stack_parm, 0)))
2764 rtx linsn = get_last_insn ();
2765 rtx sinsn, set;
2767 /* Mark complex types separately. */
2768 if (GET_CODE (parmreg) == CONCAT)
2770 enum machine_mode submode
2771 = GET_MODE_INNER (GET_MODE (parmreg));
2772 int regnor = REGNO (XEXP (parmreg, 0));
2773 int regnoi = REGNO (XEXP (parmreg, 1));
2774 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2775 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2776 GET_MODE_SIZE (submode));
2778 /* Scan backwards for the set of the real and
2779 imaginary parts. */
2780 for (sinsn = linsn; sinsn != 0;
2781 sinsn = prev_nonnote_insn (sinsn))
2783 set = single_set (sinsn);
2784 if (set == 0)
2785 continue;
2787 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2788 REG_NOTES (sinsn)
2789 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2790 REG_NOTES (sinsn));
2791 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2792 REG_NOTES (sinsn)
2793 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2794 REG_NOTES (sinsn));
2797 else if ((set = single_set (linsn)) != 0
2798 && SET_DEST (set) == parmreg)
2799 REG_NOTES (linsn)
2800 = gen_rtx_EXPR_LIST (REG_EQUIV,
2801 data->stack_parm, REG_NOTES (linsn));
2804 /* For pointer data type, suggest pointer register. */
2805 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2806 mark_reg_pointer (parmreg,
2807 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2810 /* A subroutine of assign_parms. Allocate stack space to hold the current
2811 parameter. Get it there. Perform all ABI specified conversions. */
2813 static void
2814 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2815 struct assign_parm_data_one *data)
2817 /* Value must be stored in the stack slot STACK_PARM during function
2818 execution. */
2819 bool to_conversion = false;
2821 if (data->promoted_mode != data->nominal_mode)
2823 /* Conversion is required. */
2824 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2826 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2828 push_to_sequence (all->conversion_insns);
2829 to_conversion = true;
2831 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2832 TYPE_UNSIGNED (TREE_TYPE (parm)));
2834 if (data->stack_parm)
2835 /* ??? This may need a big-endian conversion on sparc64. */
2836 data->stack_parm
2837 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2840 if (data->entry_parm != data->stack_parm)
2842 rtx src, dest;
2844 if (data->stack_parm == 0)
2846 data->stack_parm
2847 = assign_stack_local (GET_MODE (data->entry_parm),
2848 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2849 TYPE_ALIGN (data->passed_type));
2850 set_mem_attributes (data->stack_parm, parm, 1);
2853 dest = validize_mem (data->stack_parm);
2854 src = validize_mem (data->entry_parm);
2856 if (MEM_P (src))
2858 /* Use a block move to handle potentially misaligned entry_parm. */
2859 if (!to_conversion)
2860 push_to_sequence (all->conversion_insns);
2861 to_conversion = true;
2863 emit_block_move (dest, src,
2864 GEN_INT (int_size_in_bytes (data->passed_type)),
2865 BLOCK_OP_NORMAL);
2867 else
2868 emit_move_insn (dest, src);
2871 if (to_conversion)
2873 all->conversion_insns = get_insns ();
2874 end_sequence ();
2877 SET_DECL_RTL (parm, data->stack_parm);
2880 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2881 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2883 static void
2884 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2886 tree parm;
2887 tree orig_fnargs = all->orig_fnargs;
2889 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2891 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2892 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2894 rtx tmp, real, imag;
2895 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2897 real = DECL_RTL (fnargs);
2898 imag = DECL_RTL (TREE_CHAIN (fnargs));
2899 if (inner != GET_MODE (real))
2901 real = gen_lowpart_SUBREG (inner, real);
2902 imag = gen_lowpart_SUBREG (inner, imag);
2905 if (TREE_ADDRESSABLE (parm))
2907 rtx rmem, imem;
2908 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2910 /* split_complex_arg put the real and imag parts in
2911 pseudos. Move them to memory. */
2912 tmp = assign_stack_local (DECL_MODE (parm), size,
2913 TYPE_ALIGN (TREE_TYPE (parm)));
2914 set_mem_attributes (tmp, parm, 1);
2915 rmem = adjust_address_nv (tmp, inner, 0);
2916 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2917 push_to_sequence (all->conversion_insns);
2918 emit_move_insn (rmem, real);
2919 emit_move_insn (imem, imag);
2920 all->conversion_insns = get_insns ();
2921 end_sequence ();
2923 else
2924 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2925 SET_DECL_RTL (parm, tmp);
2927 real = DECL_INCOMING_RTL (fnargs);
2928 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2929 if (inner != GET_MODE (real))
2931 real = gen_lowpart_SUBREG (inner, real);
2932 imag = gen_lowpart_SUBREG (inner, imag);
2934 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2935 set_decl_incoming_rtl (parm, tmp);
2936 fnargs = TREE_CHAIN (fnargs);
2938 else
2940 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2941 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2943 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2944 instead of the copy of decl, i.e. FNARGS. */
2945 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2946 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2949 fnargs = TREE_CHAIN (fnargs);
2953 /* Assign RTL expressions to the function's parameters. This may involve
2954 copying them into registers and using those registers as the DECL_RTL. */
2956 static void
2957 assign_parms (tree fndecl)
2959 struct assign_parm_data_all all;
2960 tree fnargs, parm;
2962 current_function_internal_arg_pointer
2963 = targetm.calls.internal_arg_pointer ();
2965 assign_parms_initialize_all (&all);
2966 fnargs = assign_parms_augmented_arg_list (&all);
2968 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2970 struct assign_parm_data_one data;
2972 /* Extract the type of PARM; adjust it according to ABI. */
2973 assign_parm_find_data_types (&all, parm, &data);
2975 /* Early out for errors and void parameters. */
2976 if (data.passed_mode == VOIDmode)
2978 SET_DECL_RTL (parm, const0_rtx);
2979 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2980 continue;
2983 if (current_function_stdarg && !TREE_CHAIN (parm))
2984 assign_parms_setup_varargs (&all, &data, false);
2986 /* Find out where the parameter arrives in this function. */
2987 assign_parm_find_entry_rtl (&all, &data);
2989 /* Find out where stack space for this parameter might be. */
2990 if (assign_parm_is_stack_parm (&all, &data))
2992 assign_parm_find_stack_rtl (parm, &data);
2993 assign_parm_adjust_entry_rtl (&data);
2996 /* Record permanently how this parm was passed. */
2997 set_decl_incoming_rtl (parm, data.entry_parm);
2999 /* Update info on where next arg arrives in registers. */
3000 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3001 data.passed_type, data.named_arg);
3003 assign_parm_adjust_stack_rtl (&data);
3005 if (assign_parm_setup_block_p (&data))
3006 assign_parm_setup_block (&all, parm, &data);
3007 else if (data.passed_pointer || use_register_for_decl (parm))
3008 assign_parm_setup_reg (&all, parm, &data);
3009 else
3010 assign_parm_setup_stack (&all, parm, &data);
3013 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3014 assign_parms_unsplit_complex (&all, fnargs);
3016 /* Output all parameter conversion instructions (possibly including calls)
3017 now that all parameters have been copied out of hard registers. */
3018 emit_insn (all.conversion_insns);
3020 /* If we are receiving a struct value address as the first argument, set up
3021 the RTL for the function result. As this might require code to convert
3022 the transmitted address to Pmode, we do this here to ensure that possible
3023 preliminary conversions of the address have been emitted already. */
3024 if (all.function_result_decl)
3026 tree result = DECL_RESULT (current_function_decl);
3027 rtx addr = DECL_RTL (all.function_result_decl);
3028 rtx x;
3030 if (DECL_BY_REFERENCE (result))
3031 x = addr;
3032 else
3034 addr = convert_memory_address (Pmode, addr);
3035 x = gen_rtx_MEM (DECL_MODE (result), addr);
3036 set_mem_attributes (x, result, 1);
3038 SET_DECL_RTL (result, x);
3041 /* We have aligned all the args, so add space for the pretend args. */
3042 current_function_pretend_args_size = all.pretend_args_size;
3043 all.stack_args_size.constant += all.extra_pretend_bytes;
3044 current_function_args_size = all.stack_args_size.constant;
3046 /* Adjust function incoming argument size for alignment and
3047 minimum length. */
3049 #ifdef REG_PARM_STACK_SPACE
3050 current_function_args_size = MAX (current_function_args_size,
3051 REG_PARM_STACK_SPACE (fndecl));
3052 #endif
3054 current_function_args_size = CEIL_ROUND (current_function_args_size,
3055 PARM_BOUNDARY / BITS_PER_UNIT);
3057 #ifdef ARGS_GROW_DOWNWARD
3058 current_function_arg_offset_rtx
3059 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3060 : expand_expr (size_diffop (all.stack_args_size.var,
3061 size_int (-all.stack_args_size.constant)),
3062 NULL_RTX, VOIDmode, 0));
3063 #else
3064 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3065 #endif
3067 /* See how many bytes, if any, of its args a function should try to pop
3068 on return. */
3070 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3071 current_function_args_size);
3073 /* For a stdarg.h function, save info about
3074 regs and stack space used by the named args. */
3076 current_function_args_info = all.args_so_far;
3078 /* Set the rtx used for the function return value. Put this in its
3079 own variable so any optimizers that need this information don't have
3080 to include tree.h. Do this here so it gets done when an inlined
3081 function gets output. */
3083 current_function_return_rtx
3084 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3085 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3087 /* If scalar return value was computed in a pseudo-reg, or was a named
3088 return value that got dumped to the stack, copy that to the hard
3089 return register. */
3090 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3092 tree decl_result = DECL_RESULT (fndecl);
3093 rtx decl_rtl = DECL_RTL (decl_result);
3095 if (REG_P (decl_rtl)
3096 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3097 : DECL_REGISTER (decl_result))
3099 rtx real_decl_rtl;
3101 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3102 fndecl, true);
3103 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3104 /* The delay slot scheduler assumes that current_function_return_rtx
3105 holds the hard register containing the return value, not a
3106 temporary pseudo. */
3107 current_function_return_rtx = real_decl_rtl;
3112 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3113 For all seen types, gimplify their sizes. */
3115 static tree
3116 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3118 tree t = *tp;
3120 *walk_subtrees = 0;
3121 if (TYPE_P (t))
3123 if (POINTER_TYPE_P (t))
3124 *walk_subtrees = 1;
3125 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3126 && !TYPE_SIZES_GIMPLIFIED (t))
3128 gimplify_type_sizes (t, (tree *) data);
3129 *walk_subtrees = 1;
3133 return NULL;
3136 /* Gimplify the parameter list for current_function_decl. This involves
3137 evaluating SAVE_EXPRs of variable sized parameters and generating code
3138 to implement callee-copies reference parameters. Returns a list of
3139 statements to add to the beginning of the function, or NULL if nothing
3140 to do. */
3142 tree
3143 gimplify_parameters (void)
3145 struct assign_parm_data_all all;
3146 tree fnargs, parm, stmts = NULL;
3148 assign_parms_initialize_all (&all);
3149 fnargs = assign_parms_augmented_arg_list (&all);
3151 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3153 struct assign_parm_data_one data;
3155 /* Extract the type of PARM; adjust it according to ABI. */
3156 assign_parm_find_data_types (&all, parm, &data);
3158 /* Early out for errors and void parameters. */
3159 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3160 continue;
3162 /* Update info on where next arg arrives in registers. */
3163 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3164 data.passed_type, data.named_arg);
3166 /* ??? Once upon a time variable_size stuffed parameter list
3167 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3168 turned out to be less than manageable in the gimple world.
3169 Now we have to hunt them down ourselves. */
3170 walk_tree_without_duplicates (&data.passed_type,
3171 gimplify_parm_type, &stmts);
3173 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3175 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3176 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3179 if (data.passed_pointer)
3181 tree type = TREE_TYPE (data.passed_type);
3182 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3183 type, data.named_arg))
3185 tree local, t;
3187 /* For constant sized objects, this is trivial; for
3188 variable-sized objects, we have to play games. */
3189 if (TREE_CONSTANT (DECL_SIZE (parm)))
3191 local = create_tmp_var (type, get_name (parm));
3192 DECL_IGNORED_P (local) = 0;
3194 else
3196 tree ptr_type, addr, args;
3198 ptr_type = build_pointer_type (type);
3199 addr = create_tmp_var (ptr_type, get_name (parm));
3200 DECL_IGNORED_P (addr) = 0;
3201 local = build_fold_indirect_ref (addr);
3203 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3204 t = built_in_decls[BUILT_IN_ALLOCA];
3205 t = build_function_call_expr (t, args);
3206 t = fold_convert (ptr_type, t);
3207 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3208 gimplify_and_add (t, &stmts);
3211 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3212 gimplify_and_add (t, &stmts);
3214 SET_DECL_VALUE_EXPR (parm, local);
3215 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3220 return stmts;
3223 /* Indicate whether REGNO is an incoming argument to the current function
3224 that was promoted to a wider mode. If so, return the RTX for the
3225 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3226 that REGNO is promoted from and whether the promotion was signed or
3227 unsigned. */
3229 rtx
3230 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3232 tree arg;
3234 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3235 arg = TREE_CHAIN (arg))
3236 if (REG_P (DECL_INCOMING_RTL (arg))
3237 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3238 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3240 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3241 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3243 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3244 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3245 && mode != DECL_MODE (arg))
3247 *pmode = DECL_MODE (arg);
3248 *punsignedp = unsignedp;
3249 return DECL_INCOMING_RTL (arg);
3253 return 0;
3257 /* Compute the size and offset from the start of the stacked arguments for a
3258 parm passed in mode PASSED_MODE and with type TYPE.
3260 INITIAL_OFFSET_PTR points to the current offset into the stacked
3261 arguments.
3263 The starting offset and size for this parm are returned in
3264 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3265 nonzero, the offset is that of the stack slot, which is returned in
3266 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3267 padding required from the initial offset ptr to the stack slot.
3269 IN_REGS is nonzero if the argument will be passed in registers. It will
3270 never be set if REG_PARM_STACK_SPACE is not defined.
3272 FNDECL is the function in which the argument was defined.
3274 There are two types of rounding that are done. The first, controlled by
3275 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3276 list to be aligned to the specific boundary (in bits). This rounding
3277 affects the initial and starting offsets, but not the argument size.
3279 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3280 optionally rounds the size of the parm to PARM_BOUNDARY. The
3281 initial offset is not affected by this rounding, while the size always
3282 is and the starting offset may be. */
3284 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3285 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3286 callers pass in the total size of args so far as
3287 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
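/* Purely illustrative example, assuming a 32-bit target with upward-growing
   arguments and no REG_PARM_STACK_SPACE: a DImode argument passed on the
   stack with a FUNCTION_ARG_BOUNDARY of 64 bits, arriving when
   *INITIAL_OFFSET_PTR is 4, first receives 4 bytes of alignment padding;
   LOCATE->SLOT_OFFSET becomes 8, LOCATE->SIZE becomes 8, and the next
   stacked argument would start at offset 16.  */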
3289 void
3290 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3291 int partial, tree fndecl ATTRIBUTE_UNUSED,
3292 struct args_size *initial_offset_ptr,
3293 struct locate_and_pad_arg_data *locate)
3295 tree sizetree;
3296 enum direction where_pad;
3297 unsigned int boundary;
3298 int reg_parm_stack_space = 0;
3299 int part_size_in_regs;
3301 #ifdef REG_PARM_STACK_SPACE
3302 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3304 /* If we have found a stack parm before we reach the end of the
3305 area reserved for registers, skip that area. */
3306 if (! in_regs)
3308 if (reg_parm_stack_space > 0)
3310 if (initial_offset_ptr->var)
3312 initial_offset_ptr->var
3313 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3314 ssize_int (reg_parm_stack_space));
3315 initial_offset_ptr->constant = 0;
3317 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3318 initial_offset_ptr->constant = reg_parm_stack_space;
3321 #endif /* REG_PARM_STACK_SPACE */
3323 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3325 sizetree
3326 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3327 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3328 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3329 locate->where_pad = where_pad;
3330 locate->boundary = boundary;
3332 /* Remember if the outgoing parameter requires extra alignment on the
3333 calling function side. */
3334 if (boundary > PREFERRED_STACK_BOUNDARY)
3335 boundary = PREFERRED_STACK_BOUNDARY;
3336 if (cfun->stack_alignment_needed < boundary)
3337 cfun->stack_alignment_needed = boundary;
3339 #ifdef ARGS_GROW_DOWNWARD
3340 locate->slot_offset.constant = -initial_offset_ptr->constant;
3341 if (initial_offset_ptr->var)
3342 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3343 initial_offset_ptr->var);
3346 tree s2 = sizetree;
3347 if (where_pad != none
3348 && (!host_integerp (sizetree, 1)
3349 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3350 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3351 SUB_PARM_SIZE (locate->slot_offset, s2);
3354 locate->slot_offset.constant += part_size_in_regs;
3356 if (!in_regs
3357 #ifdef REG_PARM_STACK_SPACE
3358 || REG_PARM_STACK_SPACE (fndecl) > 0
3359 #endif
3361 pad_to_arg_alignment (&locate->slot_offset, boundary,
3362 &locate->alignment_pad);
3364 locate->size.constant = (-initial_offset_ptr->constant
3365 - locate->slot_offset.constant);
3366 if (initial_offset_ptr->var)
3367 locate->size.var = size_binop (MINUS_EXPR,
3368 size_binop (MINUS_EXPR,
3369 ssize_int (0),
3370 initial_offset_ptr->var),
3371 locate->slot_offset.var);
3373 /* Pad_below needs the pre-rounded size to know how much to pad
3374 below. */
3375 locate->offset = locate->slot_offset;
3376 if (where_pad == downward)
3377 pad_below (&locate->offset, passed_mode, sizetree);
3379 #else /* !ARGS_GROW_DOWNWARD */
3380 if (!in_regs
3381 #ifdef REG_PARM_STACK_SPACE
3382 || REG_PARM_STACK_SPACE (fndecl) > 0
3383 #endif
3385 pad_to_arg_alignment (initial_offset_ptr, boundary,
3386 &locate->alignment_pad);
3387 locate->slot_offset = *initial_offset_ptr;
3389 #ifdef PUSH_ROUNDING
3390 if (passed_mode != BLKmode)
3391 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3392 #endif
3394 /* Pad_below needs the pre-rounded size to know how much to pad below
3395 so this must be done before rounding up. */
3396 locate->offset = locate->slot_offset;
3397 if (where_pad == downward)
3398 pad_below (&locate->offset, passed_mode, sizetree);
3400 if (where_pad != none
3401 && (!host_integerp (sizetree, 1)
3402 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3403 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3405 ADD_PARM_SIZE (locate->size, sizetree);
3407 locate->size.constant -= part_size_in_regs;
3408 #endif /* ARGS_GROW_DOWNWARD */
3411 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3412 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
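/* E.g., with illustrative numbers: for BOUNDARY of 64 bits and a zero
   STACK_POINTER_OFFSET, a constant offset of 20 is rounded with
   CEIL_ROUND (20, 8) to 24; when BOUNDARY exceeds both PARM_BOUNDARY and
   STACK_BOUNDARY, ALIGNMENT_PAD records the 4 bytes of padding.
   FLOOR_ROUND is used instead when ARGS_GROW_DOWNWARD.  */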
3414 static void
3415 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3416 struct args_size *alignment_pad)
3418 tree save_var = NULL_TREE;
3419 HOST_WIDE_INT save_constant = 0;
3420 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3421 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3423 #ifdef SPARC_STACK_BOUNDARY_HACK
3424 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3425 the real alignment of %sp. However, when it does this, the
3426 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3427 if (SPARC_STACK_BOUNDARY_HACK)
3428 sp_offset = 0;
3429 #endif
3431 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3433 save_var = offset_ptr->var;
3434 save_constant = offset_ptr->constant;
3437 alignment_pad->var = NULL_TREE;
3438 alignment_pad->constant = 0;
3440 if (boundary > BITS_PER_UNIT)
3442 if (offset_ptr->var)
3444 tree sp_offset_tree = ssize_int (sp_offset);
3445 tree offset = size_binop (PLUS_EXPR,
3446 ARGS_SIZE_TREE (*offset_ptr),
3447 sp_offset_tree);
3448 #ifdef ARGS_GROW_DOWNWARD
3449 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3450 #else
3451 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3452 #endif
3454 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3455 /* ARGS_SIZE_TREE includes constant term. */
3456 offset_ptr->constant = 0;
3457 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3458 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3459 save_var);
3461 else
3463 offset_ptr->constant = -sp_offset +
3464 #ifdef ARGS_GROW_DOWNWARD
3465 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3466 #else
3467 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3468 #endif
3469 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3470 alignment_pad->constant = offset_ptr->constant - save_constant;
3475 static void
3476 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3478 if (passed_mode != BLKmode)
3480 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3481 offset_ptr->constant
3482 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3483 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3484 - GET_MODE_SIZE (passed_mode));
3486 else
3488 if (TREE_CODE (sizetree) != INTEGER_CST
3489 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3491 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3492 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3493 /* Add it in. */
3494 ADD_PARM_SIZE (*offset_ptr, s2);
3495 SUB_PARM_SIZE (*offset_ptr, sizetree);
3500 /* Walk the tree of blocks describing the binding levels within a function
3501 and warn about variables that might be killed by setjmp or vfork.
3502 This is done after calling flow_analysis and before global_alloc
3503 clobbers the pseudo-regs to hard regs. */
3505 void
3506 setjmp_vars_warning (tree block)
3508 tree decl, sub;
3510 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3512 if (TREE_CODE (decl) == VAR_DECL
3513 && DECL_RTL_SET_P (decl)
3514 && REG_P (DECL_RTL (decl))
3515 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3516 warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3517 " or %<vfork%>",
3518 decl);
3521 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3522 setjmp_vars_warning (sub);
3525 /* Do the appropriate part of setjmp_vars_warning
3526 but for arguments instead of local variables. */
3528 void
3529 setjmp_args_warning (void)
3531 tree decl;
3532 for (decl = DECL_ARGUMENTS (current_function_decl);
3533 decl; decl = TREE_CHAIN (decl))
3534 if (DECL_RTL (decl) != 0
3535 && REG_P (DECL_RTL (decl))
3536 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3537 warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3538 decl);
3542 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3543 and create duplicate blocks. */
3544 /* ??? Need an option to either create block fragments or to create
3545 abstract origin duplicates of a source block. It really depends
3546 on what optimization has been performed. */
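/* Typical situation (illustrative): after basic block reordering, a lexical
   scope can be emitted as two non-adjacent runs of insns, so its BLOCK is
   reached by two BLOCK_BEG/BLOCK_END note pairs; reorder_blocks_1 below
   turns the second occurrence into a BLOCK_FRAGMENT chained to the first,
   so the debug info can describe both address ranges.  */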
3548 void
3549 reorder_blocks (void)
3551 tree block = DECL_INITIAL (current_function_decl);
3552 VEC(tree,heap) *block_stack;
3554 if (block == NULL_TREE)
3555 return;
3557 block_stack = VEC_alloc (tree, heap, 10);
3559 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3560 clear_block_marks (block);
3562 /* Prune the old trees away, so that they don't get in the way. */
3563 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3564 BLOCK_CHAIN (block) = NULL_TREE;
3566 /* Recreate the block tree from the note nesting. */
3567 reorder_blocks_1 (get_insns (), block, &block_stack);
3568 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3570 /* Remove deleted blocks from the block fragment chains. */
3571 reorder_fix_fragments (block);
3573 VEC_free (tree, heap, block_stack);
3576 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3578 void
3579 clear_block_marks (tree block)
3581 while (block)
3583 TREE_ASM_WRITTEN (block) = 0;
3584 clear_block_marks (BLOCK_SUBBLOCKS (block));
3585 block = BLOCK_CHAIN (block);
3589 static void
3590 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3592 rtx insn;
3594 for (insn = insns; insn; insn = NEXT_INSN (insn))
3596 if (NOTE_P (insn))
3598 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3600 tree block = NOTE_BLOCK (insn);
3602 /* If we have seen this block before, that means it now
3603 spans multiple address regions. Create a new fragment. */
3604 if (TREE_ASM_WRITTEN (block))
3606 tree new_block = copy_node (block);
3607 tree origin;
3609 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3610 ? BLOCK_FRAGMENT_ORIGIN (block)
3611 : block);
3612 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3613 BLOCK_FRAGMENT_CHAIN (new_block)
3614 = BLOCK_FRAGMENT_CHAIN (origin);
3615 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3617 NOTE_BLOCK (insn) = new_block;
3618 block = new_block;
3621 BLOCK_SUBBLOCKS (block) = 0;
3622 TREE_ASM_WRITTEN (block) = 1;
3623 /* When there's only one block for the entire function,
3624 current_block == block and we mustn't do this, as it
3625 would cause infinite recursion. */
3626 if (block != current_block)
3628 BLOCK_SUPERCONTEXT (block) = current_block;
3629 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3630 BLOCK_SUBBLOCKS (current_block) = block;
3631 current_block = block;
3633 VEC_safe_push (tree, heap, *p_block_stack, block);
3635 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3637 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3638 BLOCK_SUBBLOCKS (current_block)
3639 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3640 current_block = BLOCK_SUPERCONTEXT (current_block);
3646 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3647 appears in the block tree, select one of the fragments to become
3648 the new origin block. */
3650 static void
3651 reorder_fix_fragments (tree block)
3653 while (block)
3655 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3656 tree new_origin = NULL_TREE;
3658 if (dup_origin)
3660 if (! TREE_ASM_WRITTEN (dup_origin))
3662 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3664 /* Find the first of the remaining fragments. There must
3665 be at least one -- the current block. */
3666 while (! TREE_ASM_WRITTEN (new_origin))
3667 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3668 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3671 else if (! dup_origin)
3672 new_origin = block;
3674 /* Re-root the rest of the fragments to the new origin. In the
3675 case that DUP_ORIGIN was null, that means BLOCK was the origin
3676 of a chain of fragments and we want to remove those fragments
3677 that didn't make it to the output. */
3678 if (new_origin)
3680 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3681 tree chain = *pp;
3683 while (chain)
3685 if (TREE_ASM_WRITTEN (chain))
3687 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3688 *pp = chain;
3689 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3691 chain = BLOCK_FRAGMENT_CHAIN (chain);
3693 *pp = NULL_TREE;
3696 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3697 block = BLOCK_CHAIN (block);
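/* Illustrative example (added commentary, not from the original sources;
   the scenario and block names are hypothetical).  Suppose an insn
   reordering pass splits the insns of lexical block B into two
   non-adjacent address regions.  reorder_blocks_1 then sees B's
   NOTE_INSN_BLOCK_BEG twice; the second time it creates a copy B' with

       BLOCK_FRAGMENT_ORIGIN (B') == B
       BLOCK_FRAGMENT_CHAIN (B)   == B'

   If B itself never makes it into the output block tree, the
   reorder_fix_fragments walk above promotes the first surviving fragment
   (here B') to be the new origin of the remaining chain.  */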
3701 /* Reverse the order of elements in the chain T of blocks,
3702 and return the new head of the chain (old last element). */
3704 tree
3705 blocks_nreverse (tree t)
3707 tree prev = 0, decl, next;
3708 for (decl = t; decl; decl = next)
3710 next = BLOCK_CHAIN (decl);
3711 BLOCK_CHAIN (decl) = prev;
3712 prev = decl;
3714 return prev;
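/* For illustration only (added commentary, not part of the original
   sources): blocks_nreverse above is the standard in-place reversal of a
   singly linked chain.  The same idiom on a hypothetical plain C list
   type looks like this.  */
#if 0
struct node { int value; struct node *next; };

static struct node *
list_nreverse (struct node *t)
{
  struct node *prev = NULL, *cur, *next;
  for (cur = t; cur; cur = next)
    {
      next = cur->next;   /* Remember the rest of the chain.  */
      cur->next = prev;   /* Flip this link to point backwards.  */
      prev = cur;         /* CUR is the new head so far.  */
    }
  return prev;            /* The old last element is the new head.  */
}
#endif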
3717 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3718 non-NULL, list them all into VECTOR, in a depth-first preorder
3719 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3720 blocks. */
3722 static int
3723 all_blocks (tree block, tree *vector)
3725 int n_blocks = 0;
3727 while (block)
3729 TREE_ASM_WRITTEN (block) = 0;
3731 /* Record this block. */
3732 if (vector)
3733 vector[n_blocks] = block;
3735 ++n_blocks;
3737 /* Record the subblocks, and their subblocks... */
3738 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3739 vector ? vector + n_blocks : 0);
3740 block = BLOCK_CHAIN (block);
3743 return n_blocks;
3746 /* Return a vector containing all the blocks rooted at BLOCK. The
3747 number of elements in the vector is stored in N_BLOCKS_P. The
3748 vector is dynamically allocated; it is the caller's responsibility
3749 to call `free' on the pointer returned. */
3751 static tree *
3752 get_block_vector (tree block, int *n_blocks_p)
3754 tree *block_vector;
3756 *n_blocks_p = all_blocks (block, NULL);
3757 block_vector = XNEWVEC (tree, *n_blocks_p);
3758 all_blocks (block, block_vector);
3760 return block_vector;
3763 static GTY(()) int next_block_index = 2;
3765 /* Set BLOCK_NUMBER for all the blocks in FN. */
3767 void
3768 number_blocks (tree fn)
3770 int i;
3771 int n_blocks;
3772 tree *block_vector;
3774 /* For SDB and XCOFF debugging output, we start numbering the blocks
3775 from 1 within each function, rather than keeping a running
3776 count. */
3777 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3778 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3779 next_block_index = 1;
3780 #endif
3782 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3784 /* The top-level BLOCK isn't numbered at all. */
3785 for (i = 1; i < n_blocks; ++i)
3786 /* We number the blocks from two. */
3787 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3789 free (block_vector);
3791 return;
3794 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3796 tree
3797 debug_find_var_in_block_tree (tree var, tree block)
3799 tree t;
3801 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3802 if (t == var)
3803 return block;
3805 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3807 tree ret = debug_find_var_in_block_tree (var, t);
3808 if (ret)
3809 return ret;
3812 return NULL_TREE;
3815 /* Allocate a function structure for FNDECL and set its contents
3816 to the defaults. */
3818 void
3819 allocate_struct_function (tree fndecl)
3821 tree result;
3822 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3824 cfun = ggc_alloc_cleared (sizeof (struct function));
3826 cfun->stack_alignment_needed = STACK_BOUNDARY;
3827 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3829 current_function_funcdef_no = funcdef_no++;
3831 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3833 init_eh_for_function ();
3835 lang_hooks.function.init (cfun);
3836 if (init_machine_status)
3837 cfun->machine = (*init_machine_status) ();
3839 if (fndecl == NULL)
3840 return;
3842 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3843 cfun->decl = fndecl;
3845 result = DECL_RESULT (fndecl);
3846 if (aggregate_value_p (result, fndecl))
3848 #ifdef PCC_STATIC_STRUCT_RETURN
3849 current_function_returns_pcc_struct = 1;
3850 #endif
3851 current_function_returns_struct = 1;
3854 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3856 current_function_stdarg
3857 = (fntype
3858 && TYPE_ARG_TYPES (fntype) != 0
3859 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3860 != void_type_node));
3862 /* Assume all registers in stdarg functions need to be saved. */
3863 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3864 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
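/* Illustrative examples (added commentary, not part of the original
   sources) of how the stdarg test above classifies prototypes:  */
#if 0
int f (int x, ...);   /* TYPE_ARG_TYPES ends in `int', not void_type_node,
                          so current_function_stdarg is set.  */
int g (int x);        /* Prototyped; the list ends with void_type_node,
                          so not stdarg.  */
int h ();             /* Unprototyped; TYPE_ARG_TYPES is 0, so not stdarg.  */
#endif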
3867 /* Reset cfun and other non-struct-function variables to defaults as
3868 appropriate for emitting rtl at the start of a function. */
3870 static void
3871 prepare_function_start (tree fndecl)
3873 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3874 cfun = DECL_STRUCT_FUNCTION (fndecl);
3875 else
3876 allocate_struct_function (fndecl);
3877 init_emit ();
3878 init_varasm_status (cfun);
3879 init_expr ();
3881 cse_not_expected = ! optimize;
3883 /* Caller save not needed yet. */
3884 caller_save_needed = 0;
3886 /* We haven't done register allocation yet. */
3887 reg_renumber = 0;
3889 /* Indicate that we have not instantiated virtual registers yet. */
3890 virtuals_instantiated = 0;
3892 /* Indicate that we want CONCATs now. */
3893 generating_concat_p = 1;
3895 /* Indicate we have no need of a frame pointer yet. */
3896 frame_pointer_needed = 0;
3899 /* Initialize the rtl expansion mechanism so that we can do simple things
3900 like generate sequences. This is used to provide a context during global
3901 initialization of some passes. */
3902 void
3903 init_dummy_function_start (void)
3905 prepare_function_start (NULL);
3908 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3909 and initialize static variables for generating RTL for the statements
3910 of the function. */
3912 void
3913 init_function_start (tree subr)
3915 prepare_function_start (subr);
3917 /* Prevent ever trying to delete the first instruction of a
3918 function. Also tell final how to output a linenum before the
3919 function prologue. Note linenums could be missing, e.g. when
3920 compiling a Java .class file. */
3921 if (! DECL_IS_BUILTIN (subr))
3922 emit_line_note (DECL_SOURCE_LOCATION (subr));
3924 /* Make sure first insn is a note even if we don't want linenums.
3925 This makes sure the first insn will never be deleted.
3926 Also, final expects a note to appear there. */
3927 emit_note (NOTE_INSN_DELETED);
3929 /* Warn if this value is an aggregate type,
3930 regardless of which calling convention we are using for it. */
3931 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3932 warning (OPT_Waggregate_return, "function returns an aggregate");
3935 /* Make sure all values used by the optimization passes have sane
3936 defaults. */
3937 void
3938 init_function_for_compilation (void)
3940 reg_renumber = 0;
3942 /* No prologue/epilogue insns yet. Make sure that these vectors are
3943 empty. */
3944 gcc_assert (VEC_length (int, prologue) == 0);
3945 gcc_assert (VEC_length (int, epilogue) == 0);
3946 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3949 struct tree_opt_pass pass_init_function =
3951 NULL, /* name */
3952 NULL, /* gate */
3953 init_function_for_compilation, /* execute */
3954 NULL, /* sub */
3955 NULL, /* next */
3956 0, /* static_pass_number */
3957 0, /* tv_id */
3958 0, /* properties_required */
3959 0, /* properties_provided */
3960 0, /* properties_destroyed */
3961 0, /* todo_flags_start */
3962 0, /* todo_flags_finish */
3963 0 /* letter */
3967 void
3968 expand_main_function (void)
3970 #if (defined(INVOKE__main) \
3971 || (!defined(HAS_INIT_SECTION) \
3972 && !defined(INIT_SECTION_ASM_OP) \
3973 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3974 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3975 #endif
3978 /* Expand code to initialize the stack_protect_guard. This is invoked at
3979 the beginning of a function to be protected. */
3981 #ifndef HAVE_stack_protect_set
3982 # define HAVE_stack_protect_set 0
3983 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
3984 #endif
3986 void
3987 stack_protect_prologue (void)
3989 tree guard_decl = targetm.stack_protect_guard ();
3990 rtx x, y;
3992 /* Avoid expand_expr here, because we don't want guard_decl pulled
3993 into registers unless absolutely necessary. And we know that
3994 cfun->stack_protect_guard is a local stack slot, so this skips
3995 all the fluff. */
3996 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3997 y = validize_mem (DECL_RTL (guard_decl));
3999 /* Allow the target to copy from Y to X without leaking Y into a
4000 register. */
4001 if (HAVE_stack_protect_set)
4003 rtx insn = gen_stack_protect_set (x, y);
4004 if (insn)
4006 emit_insn (insn);
4007 return;
4011 /* Otherwise do a straight move. */
4012 emit_move_insn (x, y);
4015 /* Expand code to verify the stack_protect_guard. This is invoked at
4016 the end of a function to be protected. */
4018 #ifndef HAVE_stack_protect_test
4019 # define HAVE_stack_protect_test 0
4020 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4021 #endif
4023 void
4024 stack_protect_epilogue (void)
4026 tree guard_decl = targetm.stack_protect_guard ();
4027 rtx label = gen_label_rtx ();
4028 rtx x, y, tmp;
4030 /* Avoid expand_expr here, because we don't want guard_decl pulled
4031 into registers unless absolutely necessary. And we know that
4032 cfun->stack_protect_guard is a local stack slot, so this skips
4033 all the fluff. */
4034 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4035 y = validize_mem (DECL_RTL (guard_decl));
4037 /* Allow the target to compare Y with X without leaking either into
4038 a register. */
4039 switch (HAVE_stack_protect_test != 0)
4041 case 1:
4042 tmp = gen_stack_protect_test (x, y, label);
4043 if (tmp)
4045 emit_insn (tmp);
4046 break;
4048 /* FALLTHRU */
4050 default:
4051 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4052 break;
4055 /* The noreturn predictor has been moved to the tree level. The rtl-level
4056 predictors estimate this branch about 20%, which isn't enough to get
4057 things moved out of line. Since this is the only extant case of adding
4058 a noreturn function at the rtl level, it doesn't seem worth doing anything
4059 except adding the prediction by hand. */
4060 tmp = get_last_insn ();
4061 if (JUMP_P (tmp))
4062 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4064 expand_expr_stmt (targetm.stack_protect_fail ());
4065 emit_label (label);
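/* For illustration only (added commentary, not part of the original
   sources; all names below are hypothetical).  At the source level the
   guard handled by stack_protect_prologue and stack_protect_epilogue
   above behaves roughly like a hand-written canary: copy a global guard
   value into the frame on entry and verify the copy before returning.  */
#if 0
extern unsigned long __guard;                 /* hypothetical global guard */
extern void __guard_failure (void);           /* hypothetical failure handler */

void
protected_function (void)
{
  volatile unsigned long canary = __guard;    /* "prologue": copy the guard */
  char buf[64];
  /* ... body that might overflow BUF ... */
  if (canary != __guard)                      /* "epilogue": compare the copy */
    __guard_failure ();                       /* mismatch => corruption detected */
}
#endif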
4068 /* Start the RTL for a new function, and set variables used for
4069 emitting RTL.
4070 SUBR is the FUNCTION_DECL node.
4071 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4072 the function's parameters, which must be run at any return statement. */
4074 void
4075 expand_function_start (tree subr)
4077 /* Make sure volatile mem refs aren't considered
4078 valid operands of arithmetic insns. */
4079 init_recog_no_volatile ();
4081 current_function_profile
4082 = (profile_flag
4083 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4085 current_function_limit_stack
4086 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4088 /* Make the label for return statements to jump to. Do not special
4089 case machines with special return instructions -- they will be
4090 handled later during jump, ifcvt, or epilogue creation. */
4091 return_label = gen_label_rtx ();
4093 /* Initialize rtx used to return the value. */
4094 /* Do this before assign_parms so that we copy the struct value address
4095 before any library calls that assign parms might generate. */
4097 /* Decide whether to return the value in memory or in a register. */
4098 if (aggregate_value_p (DECL_RESULT (subr), subr))
4100 /* Returning something that won't go in a register. */
4101 rtx value_address = 0;
4103 #ifdef PCC_STATIC_STRUCT_RETURN
4104 if (current_function_returns_pcc_struct)
4106 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4107 value_address = assemble_static_space (size);
4109 else
4110 #endif
4112 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
4113 /* Expect to be passed the address of a place to store the value.
4114 If it is passed as an argument, assign_parms will take care of
4115 it. */
4116 if (sv)
4118 value_address = gen_reg_rtx (Pmode);
4119 emit_move_insn (value_address, sv);
4122 if (value_address)
4124 rtx x = value_address;
4125 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4127 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4128 set_mem_attributes (x, DECL_RESULT (subr), 1);
4130 SET_DECL_RTL (DECL_RESULT (subr), x);
4133 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4134 /* If return mode is void, this decl rtl should not be used. */
4135 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4136 else
4138 /* Compute the return values into a pseudo reg, which we will copy
4139 into the true return register after the cleanups are done. */
4140 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4141 if (TYPE_MODE (return_type) != BLKmode
4142 && targetm.calls.return_in_msb (return_type))
4143 /* expand_function_end will insert the appropriate padding in
4144 this case. Use the return value's natural (unpadded) mode
4145 within the function proper. */
4146 SET_DECL_RTL (DECL_RESULT (subr),
4147 gen_reg_rtx (TYPE_MODE (return_type)));
4148 else
4150 /* In order to figure out what mode to use for the pseudo, we
4151 figure out what the mode of the eventual return register will
4152 actually be, and use that. */
4153 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4155 /* Structures that are returned in registers are not
4156 aggregate_value_p, so we may see a PARALLEL or a REG. */
4157 if (REG_P (hard_reg))
4158 SET_DECL_RTL (DECL_RESULT (subr),
4159 gen_reg_rtx (GET_MODE (hard_reg)));
4160 else
4162 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4163 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4167 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4168 result to the real return register(s). */
4169 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4172 /* Initialize rtx for parameters and local variables.
4173 In some cases this requires emitting insns. */
4174 assign_parms (subr);
4176 /* If function gets a static chain arg, store it. */
4177 if (cfun->static_chain_decl)
4179 tree parm = cfun->static_chain_decl;
4180 rtx local = gen_reg_rtx (Pmode);
4182 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4183 SET_DECL_RTL (parm, local);
4184 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4186 emit_move_insn (local, static_chain_incoming_rtx);
4189 /* If the function receives a non-local goto, then store the
4190 bits we need to restore the frame pointer. */
4191 if (cfun->nonlocal_goto_save_area)
4193 tree t_save;
4194 rtx r_save;
4196 /* ??? We need to do this save early. Unfortunately, this point is
4197 before the frame variable gets declared. Help out... */
4198 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4200 t_save = build4 (ARRAY_REF, ptr_type_node,
4201 cfun->nonlocal_goto_save_area,
4202 integer_zero_node, NULL_TREE, NULL_TREE);
4203 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4204 r_save = convert_memory_address (Pmode, r_save);
4206 emit_move_insn (r_save, virtual_stack_vars_rtx);
4207 update_nonlocal_goto_save_area ();
4210 /* The following was moved from init_function_start.
4211 The move is supposed to make sdb output more accurate. */
4212 /* Indicate the beginning of the function body,
4213 as opposed to parm setup. */
4214 emit_note (NOTE_INSN_FUNCTION_BEG);
4216 if (!NOTE_P (get_last_insn ()))
4217 emit_note (NOTE_INSN_DELETED);
4218 parm_birth_insn = get_last_insn ();
4220 if (current_function_profile)
4222 #ifdef PROFILE_HOOK
4223 PROFILE_HOOK (current_function_funcdef_no);
4224 #endif
4227 /* After the display initializations is where the tail-recursion label
4228 should go, if we end up needing one. Ensure we have a NOTE here
4229 since some things (like trampolines) get placed before this. */
4230 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4232 /* Make sure there is a line number after the function entry setup code. */
4233 force_next_line_note ();
4236 /* Undo the effects of init_dummy_function_start. */
4237 void
4238 expand_dummy_function_end (void)
4240 /* End any sequences that failed to be closed due to syntax errors. */
4241 while (in_sequence_p ())
4242 end_sequence ();
4244 /* Outside function body, can't compute type's actual size
4245 until next function's body starts. */
4247 free_after_parsing (cfun);
4248 free_after_compilation (cfun);
4249 cfun = 0;
4252 /* Call DOIT for each hard register used as a return value from
4253 the current function. */
4255 void
4256 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4258 rtx outgoing = current_function_return_rtx;
4260 if (! outgoing)
4261 return;
4263 if (REG_P (outgoing))
4264 (*doit) (outgoing, arg);
4265 else if (GET_CODE (outgoing) == PARALLEL)
4267 int i;
4269 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4271 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4273 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4274 (*doit) (x, arg);
4279 static void
4280 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4282 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4285 void
4286 clobber_return_register (void)
4288 diddle_return_value (do_clobber_return_reg, NULL);
4290 /* In case we do use pseudo to return value, clobber it too. */
4291 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4293 tree decl_result = DECL_RESULT (current_function_decl);
4294 rtx decl_rtl = DECL_RTL (decl_result);
4295 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4297 do_clobber_return_reg (decl_rtl, NULL);
4302 static void
4303 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4305 emit_insn (gen_rtx_USE (VOIDmode, reg));
4308 void
4309 use_return_register (void)
4311 diddle_return_value (do_use_return_reg, NULL);
4314 /* Possibly warn about unused parameters. */
4315 void
4316 do_warn_unused_parameter (tree fn)
4318 tree decl;
4320 for (decl = DECL_ARGUMENTS (fn);
4321 decl; decl = TREE_CHAIN (decl))
4322 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4323 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4324 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4327 static GTY(()) rtx initial_trampoline;
4329 /* Generate RTL for the end of the current function. */
4331 void
4332 expand_function_end (void)
4334 rtx clobber_after;
4336 /* If arg_pointer_save_area was referenced only from a nested
4337 function, we will not have initialized it yet. Do that now. */
4338 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4339 get_arg_pointer_save_area (cfun);
4341 /* If we are doing stack checking and this function makes calls,
4342 do a stack probe at the start of the function to ensure we have enough
4343 space for another stack frame. */
4344 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4346 rtx insn, seq;
4348 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4349 if (CALL_P (insn))
4351 start_sequence ();
4352 probe_stack_range (STACK_CHECK_PROTECT,
4353 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4354 seq = get_insns ();
4355 end_sequence ();
4356 emit_insn_before (seq, tail_recursion_reentry);
4357 break;
4361 /* Possibly warn about unused parameters.
4362 When the front end does unit-at-a-time, the warning is already
4363 issued at finalization time. */
4364 if (warn_unused_parameter
4365 && !lang_hooks.callgraph.expand_function)
4366 do_warn_unused_parameter (current_function_decl);
4368 /* End any sequences that failed to be closed due to syntax errors. */
4369 while (in_sequence_p ())
4370 end_sequence ();
4372 clear_pending_stack_adjust ();
4373 do_pending_stack_adjust ();
4375 /* Mark the end of the function body.
4376 If control reaches this insn, the function can drop through
4377 without returning a value. */
4378 emit_note (NOTE_INSN_FUNCTION_END);
4380 /* Must mark the last line number note in the function, so that the test
4381 coverage code can avoid counting the last line twice. This just tells
4382 the code to ignore the immediately following line note, since there
4383 already exists a copy of this note somewhere above. This line number
4384 note is still needed for debugging though, so we can't delete it. */
4385 if (flag_test_coverage)
4386 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4388 /* Output a linenumber for the end of the function.
4389 SDB depends on this. */
4390 force_next_line_note ();
4391 emit_line_note (input_location);
4393 /* Before the return label (if any), clobber the return
4394 registers so that they are not propagated live to the rest of
4395 the function. This can only happen with functions that drop
4396 through; if there had been a return statement, there would
4397 have either been a return rtx, or a jump to the return label.
4399 We delay actual code generation until after the current_function_value_rtx
4400 is computed. */
4401 clobber_after = get_last_insn ();
4403 /* Output the label for the actual return from the function. */
4404 emit_label (return_label);
4406 if (USING_SJLJ_EXCEPTIONS)
4408 /* Let except.c know where it should emit the call to unregister
4409 the function context for sjlj exceptions. */
4410 if (flag_exceptions)
4411 sjlj_emit_function_exit_after (get_last_insn ());
4413 else
4415 /* @@@ This is a kludge. We want to ensure that instructions that
4416 may trap are not moved into the epilogue by scheduling, because
4417 we don't always emit unwind information for the epilogue.
4418 However, not all machine descriptions define a blockage insn, so
4419 emit an ASM_INPUT to act as one. */
4420 if (flag_non_call_exceptions)
4421 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4424 /* If this is an implementation of throw, do what's necessary to
4425 communicate between __builtin_eh_return and the epilogue. */
4426 expand_eh_return ();
4428 /* If scalar return value was computed in a pseudo-reg, or was a named
4429 return value that got dumped to the stack, copy that to the hard
4430 return register. */
4431 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4433 tree decl_result = DECL_RESULT (current_function_decl);
4434 rtx decl_rtl = DECL_RTL (decl_result);
4436 if (REG_P (decl_rtl)
4437 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4438 : DECL_REGISTER (decl_result))
4440 rtx real_decl_rtl = current_function_return_rtx;
4442 /* This should be set in assign_parms. */
4443 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4445 /* If this is a BLKmode structure being returned in registers,
4446 then use the mode computed in expand_return. Note that if
4447 decl_rtl is memory, then its mode may have been changed,
4448 but that current_function_return_rtx has not. */
4449 if (GET_MODE (real_decl_rtl) == BLKmode)
4450 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4452 /* If a non-BLKmode return value should be padded at the least
4453 significant end of the register, shift it left by the appropriate
4454 amount. BLKmode results are handled using the group load/store
4455 machinery. */
4456 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4457 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4459 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4460 REGNO (real_decl_rtl)),
4461 decl_rtl);
4462 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4464 /* If a named return value dumped decl_return to memory, then
4465 we may need to re-do the PROMOTE_MODE signed/unsigned
4466 extension. */
4467 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4469 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4471 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4472 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4473 &unsignedp, 1);
4475 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4477 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4479 /* If expand_function_start has created a PARALLEL for decl_rtl,
4480 move the result to the real return registers. Otherwise, do
4481 a group load from decl_rtl for a named return. */
4482 if (GET_CODE (decl_rtl) == PARALLEL)
4483 emit_group_move (real_decl_rtl, decl_rtl);
4484 else
4485 emit_group_load (real_decl_rtl, decl_rtl,
4486 TREE_TYPE (decl_result),
4487 int_size_in_bytes (TREE_TYPE (decl_result)));
4489 /* In the case of complex integer modes smaller than a word, we'll
4490 need to generate some non-trivial bitfield insertions. Do that
4491 on a pseudo and not the hard register. */
4492 else if (GET_CODE (decl_rtl) == CONCAT
4493 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4494 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4496 int old_generating_concat_p;
4497 rtx tmp;
4499 old_generating_concat_p = generating_concat_p;
4500 generating_concat_p = 0;
4501 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4502 generating_concat_p = old_generating_concat_p;
4504 emit_move_insn (tmp, decl_rtl);
4505 emit_move_insn (real_decl_rtl, tmp);
4507 else
4508 emit_move_insn (real_decl_rtl, decl_rtl);
4512 /* If returning a structure, arrange to return the address of the value
4513 in a place where debuggers expect to find it.
4515 If returning a structure PCC style,
4516 the caller also depends on this value.
4517 And current_function_returns_pcc_struct is not necessarily set. */
4518 if (current_function_returns_struct
4519 || current_function_returns_pcc_struct)
4521 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4522 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4523 rtx outgoing;
4525 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4526 type = TREE_TYPE (type);
4527 else
4528 value_address = XEXP (value_address, 0);
4530 outgoing = targetm.calls.function_value (build_pointer_type (type),
4531 current_function_decl, true);
4533 /* Mark this as a function return value so integrate will delete the
4534 assignment and USE below when inlining this function. */
4535 REG_FUNCTION_VALUE_P (outgoing) = 1;
4537 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4538 value_address = convert_memory_address (GET_MODE (outgoing),
4539 value_address);
4541 emit_move_insn (outgoing, value_address);
4543 /* Show return register used to hold result (in this case the address
4544 of the result). */
4545 current_function_return_rtx = outgoing;
4548 /* Emit the actual code to clobber return register. */
4550 rtx seq;
4552 start_sequence ();
4553 clobber_return_register ();
4554 expand_naked_return ();
4555 seq = get_insns ();
4556 end_sequence ();
4558 emit_insn_after (seq, clobber_after);
4561 /* Output the label for the naked return from the function. */
4562 emit_label (naked_return_label);
4564 /* If stack protection is enabled for this function, check the guard. */
4565 if (cfun->stack_protect_guard)
4566 stack_protect_epilogue ();
4568 /* If we had calls to alloca, and this machine needs
4569 an accurate stack pointer to exit the function,
4570 insert some code to save and restore the stack pointer. */
4571 if (! EXIT_IGNORE_STACK
4572 && current_function_calls_alloca)
4574 rtx tem = 0;
4576 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4577 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4580 /* ??? This should no longer be necessary now that the old `stupid'
4581 register allocator is gone, but there are some parts of the compiler
4582 (e.g. reload_combine, and sh mach_dep_reorg) that still try to compute
4583 their own lifetime info instead of using the general framework. */
4584 use_return_register ();
rtx
4588 get_arg_pointer_save_area (struct function *f)
4590 rtx ret = f->x_arg_pointer_save_area;
4592 if (! ret)
4594 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4595 f->x_arg_pointer_save_area = ret;
4598 if (f == cfun && ! f->arg_pointer_save_area_init)
4600 rtx seq;
4602 /* Save the arg pointer at the beginning of the function. The
4603 generated stack slot may not be a valid memory address, so we
4604 have to check it and fix it if necessary. */
4605 start_sequence ();
4606 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4607 seq = get_insns ();
4608 end_sequence ();
4610 push_topmost_sequence ();
4611 emit_insn_after (seq, entry_of_function ());
4612 pop_topmost_sequence ();
4615 return ret;
4618 /* Extend a vector that records the INSN_UIDs of INSNS
4619 (a list of one or more insns). */
4621 static void
4622 record_insns (rtx insns, VEC(int,heap) **vecp)
4624 rtx tmp;
4626 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4627 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4630 /* Set the locator of the insn chain starting at INSN to LOC. */
4631 static void
4632 set_insn_locators (rtx insn, int loc)
4634 while (insn != NULL_RTX)
4636 if (INSN_P (insn))
4637 INSN_LOCATOR (insn) = loc;
4638 insn = NEXT_INSN (insn);
4642 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4643 be running after reorg, SEQUENCE rtl is possible. */
4645 static int
4646 contains (rtx insn, VEC(int,heap) **vec)
4648 int i, j;
4650 if (NONJUMP_INSN_P (insn)
4651 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4653 int count = 0;
4654 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4655 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4656 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4657 == VEC_index (int, *vec, j))
4658 count++;
4659 return count;
4661 else
4663 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4664 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4665 return 1;
4667 return 0;
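/* Illustrative note (added commentary, not from the original sources):
   after delayed-branch scheduling (reorg) an epilogue insn can end up in
   the delay slot of another insn, in which case both are wrapped in a
   single SEQUENCE pattern.  That is why contains () above walks the
   elements of a SEQUENCE and counts every recorded UID it finds, instead
   of looking only at the outer insn.  */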
int
4671 prologue_epilogue_contains (rtx insn)
4673 if (contains (insn, &prologue))
4674 return 1;
4675 if (contains (insn, &epilogue))
4676 return 1;
4677 return 0;
int
4681 sibcall_epilogue_contains (rtx insn)
4683 if (sibcall_epilogue)
4684 return contains (insn, &sibcall_epilogue);
4685 return 0;
4688 #ifdef HAVE_return
4689 /* Insert gen_return at the end of block BB. This also means updating
4690 block_for_insn appropriately. */
4692 static void
4693 emit_return_into_block (basic_block bb, rtx line_note)
4695 emit_jump_insn_after (gen_return (), BB_END (bb));
4696 if (line_note)
4697 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4699 #endif /* HAVE_return */
4701 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4703 /* These functions convert the epilogue into a variant that does not
4704 modify the stack pointer. This is used in cases where a function
4705 returns an object whose size is not known until it is computed.
4706 The called function leaves the object on the stack, leaves the
4707 stack depressed, and returns a pointer to the object.
4709 What we need to do is track all modifications and references to the
4710 stack pointer, deleting the modifications and changing the
4711 references to point to the location the stack pointer would have
4712 pointed to had the modifications taken place.
4714 These functions need to be portable so we need to make as few
4715 assumptions about the epilogue as we can. However, the epilogue
4716 basically contains three things: instructions to reset the stack
4717 pointer, instructions to reload registers, possibly including the
4718 frame pointer, and an instruction to return to the caller.
4720 We must be sure of what a relevant epilogue insn is doing. We also
4721 make no attempt to validate the insns we make since if they are
4722 invalid, we probably can't do anything valid. The intent is that
4723 these routines get "smarter" as more and more machines start to use
4724 them and they try operating on different epilogues.
4726 We use the following structure to track what the part of the
4727 epilogue that we've already processed has done. We keep two copies
4728 of the SP equivalence, one for use during the insn we are
4729 processing and one for use in the next insn. The difference is
4730 because one part of a PARALLEL may adjust SP and the other may use
4731 it. */
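/* Hypothetical worked example (added commentary, not from the original
   sources; the register choices are made up).  Given an epilogue of the
   form

       (set (reg sp) (reg fp))                ;; 1: reset the stack pointer
       (set (reg fp) (mem (plus (reg sp) 8))) ;; 2: reload the frame pointer
       (return)                               ;; 3: return to the caller

   insn 1 is not emitted; instead SP is recorded as equivalent to FP with
   offset 0.  Insn 2 sets the register SP is now equivalent to, so the
   reload is postponed (equiv_reg_src) and the SP reference in its address
   is rewritten in terms of FP.  For insn 3 the pending reload is emitted
   and, if the return address lives in memory addressed via SP, the RETURN
   is turned into an indirect jump through an address expressed relative
   to FP instead, so the stack pointer itself is never modified.  */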
4733 struct epi_info
4735 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4736 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4737 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4738 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4739 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4740 should be set to once we no longer need
4741 its value. */
4742 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4743 for registers. */
4746 static void handle_epilogue_set (rtx, struct epi_info *);
4747 static void update_epilogue_consts (rtx, rtx, void *);
4748 static void emit_equiv_load (struct epi_info *);
4750 /* Modify INSN, a list of one or more insns that is part of the epilogue,
4751 so that it makes no modifications to the stack pointer. Return the new list of insns. */
4753 static rtx
4754 keep_stack_depressed (rtx insns)
4756 int j;
4757 struct epi_info info;
4758 rtx insn, next;
4760 /* If the epilogue is just a single instruction, it must be OK as is. */
4761 if (NEXT_INSN (insns) == NULL_RTX)
4762 return insns;
4764 /* Otherwise, start a sequence, initialize the information we have, and
4765 process all the insns we were given. */
4766 start_sequence ();
4768 info.sp_equiv_reg = stack_pointer_rtx;
4769 info.sp_offset = 0;
4770 info.equiv_reg_src = 0;
4772 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4773 info.const_equiv[j] = 0;
4775 insn = insns;
4776 next = NULL_RTX;
4777 while (insn != NULL_RTX)
4779 next = NEXT_INSN (insn);
4781 if (!INSN_P (insn))
4783 add_insn (insn);
4784 insn = next;
4785 continue;
4788 /* If this insn references the register that SP is equivalent to and
4789 we have a pending load to that register, we must force out the load
4790 first and then indicate we no longer know what SP's equivalent is. */
4791 if (info.equiv_reg_src != 0
4792 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4794 emit_equiv_load (&info);
4795 info.sp_equiv_reg = 0;
4798 info.new_sp_equiv_reg = info.sp_equiv_reg;
4799 info.new_sp_offset = info.sp_offset;
4801 /* If this is a (RETURN) and the return address is on the stack,
4802 update the address and change to an indirect jump. */
4803 if (GET_CODE (PATTERN (insn)) == RETURN
4804 || (GET_CODE (PATTERN (insn)) == PARALLEL
4805 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4807 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4808 rtx base = 0;
4809 HOST_WIDE_INT offset = 0;
4810 rtx jump_insn, jump_set;
4812 /* If the return address is in a register, we can emit the insn
4813 unchanged. Otherwise, it must be a MEM and we see what the
4814 base register and offset are. In any case, we have to emit any
4815 pending load to the equivalent reg of SP, if any. */
4816 if (REG_P (retaddr))
4818 emit_equiv_load (&info);
4819 add_insn (insn);
4820 insn = next;
4821 continue;
4823 else
4825 rtx ret_ptr;
4826 gcc_assert (MEM_P (retaddr));
4828 ret_ptr = XEXP (retaddr, 0);
4830 if (REG_P (ret_ptr))
4832 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4833 offset = 0;
4835 else
4837 gcc_assert (GET_CODE (ret_ptr) == PLUS
4838 && REG_P (XEXP (ret_ptr, 0))
4839 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4840 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4841 offset = INTVAL (XEXP (ret_ptr, 1));
4845 /* If the base of the location containing the return pointer
4846 is SP, we must update it with the replacement address. Otherwise,
4847 just build the necessary MEM. */
4848 retaddr = plus_constant (base, offset);
4849 if (base == stack_pointer_rtx)
4850 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4851 plus_constant (info.sp_equiv_reg,
4852 info.sp_offset));
4854 retaddr = gen_rtx_MEM (Pmode, retaddr);
4855 MEM_NOTRAP_P (retaddr) = 1;
4857 /* If there is a pending load to the equivalent register for SP
4858 and we reference that register, we must load our address into
4859 a scratch register and then do that load. */
4860 if (info.equiv_reg_src
4861 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4863 unsigned int regno;
4864 rtx reg;
4866 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4867 if (HARD_REGNO_MODE_OK (regno, Pmode)
4868 && !fixed_regs[regno]
4869 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4870 && !REGNO_REG_SET_P
4871 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4872 && !refers_to_regno_p (regno,
4873 regno + hard_regno_nregs[regno]
4874 [Pmode],
4875 info.equiv_reg_src, NULL)
4876 && info.const_equiv[regno] == 0)
4877 break;
4879 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4881 reg = gen_rtx_REG (Pmode, regno);
4882 emit_move_insn (reg, retaddr);
4883 retaddr = reg;
4886 emit_equiv_load (&info);
4887 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4889 /* Show the SET in the above insn is a RETURN. */
4890 jump_set = single_set (jump_insn);
4891 gcc_assert (jump_set);
4892 SET_IS_RETURN_P (jump_set) = 1;
4895 /* If SP is not mentioned in the pattern and its equivalent register, if
4896 any, is not modified, just emit it. Otherwise, if neither is set,
4897 replace the reference to SP and emit the insn. If none of those are
4898 true, handle each SET individually. */
4899 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4900 && (info.sp_equiv_reg == stack_pointer_rtx
4901 || !reg_set_p (info.sp_equiv_reg, insn)))
4902 add_insn (insn);
4903 else if (! reg_set_p (stack_pointer_rtx, insn)
4904 && (info.sp_equiv_reg == stack_pointer_rtx
4905 || !reg_set_p (info.sp_equiv_reg, insn)))
4907 int changed;
4909 changed = validate_replace_rtx (stack_pointer_rtx,
4910 plus_constant (info.sp_equiv_reg,
4911 info.sp_offset),
4912 insn);
4913 gcc_assert (changed);
4915 add_insn (insn);
4917 else if (GET_CODE (PATTERN (insn)) == SET)
4918 handle_epilogue_set (PATTERN (insn), &info);
4919 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4921 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4922 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4923 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4925 else
4926 add_insn (insn);
4928 info.sp_equiv_reg = info.new_sp_equiv_reg;
4929 info.sp_offset = info.new_sp_offset;
4931 /* Now update any constants this insn sets. */
4932 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4933 insn = next;
4936 insns = get_insns ();
4937 end_sequence ();
4938 return insns;
4941 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4942 structure that contains information about what we've seen so far. We
4943 process this SET by either updating that data or by emitting one or
4944 more insns. */
4946 static void
4947 handle_epilogue_set (rtx set, struct epi_info *p)
4949 /* First handle the case where we are setting SP. Record what it is being
4950 set from, which we must be able to determine. */
4951 if (reg_set_p (stack_pointer_rtx, set))
4953 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4955 if (GET_CODE (SET_SRC (set)) == PLUS)
4957 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4958 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4959 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4960 else
4962 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4963 && (REGNO (XEXP (SET_SRC (set), 1))
4964 < FIRST_PSEUDO_REGISTER)
4965 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4966 p->new_sp_offset
4967 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4970 else
4971 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4973 /* If we are adjusting SP, we adjust from the old data. */
4974 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4976 p->new_sp_equiv_reg = p->sp_equiv_reg;
4977 p->new_sp_offset += p->sp_offset;
4980 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4982 return;
4985 /* Next handle the case where we are setting SP's equivalent
4986 register. We must not already have a value to set it to. We
4987 could update, but there seems little point in handling that case.
4988 Note that we have to allow for the case where we are setting the
4989 register set in the previous part of a PARALLEL inside a single
4990 insn. But use the old offset for any updates within this insn.
4991 We must allow for the case where the register is being set in a
4992 different (usually wider) mode than Pmode. */
4993 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4995 gcc_assert (!p->equiv_reg_src
4996 && REG_P (p->new_sp_equiv_reg)
4997 && REG_P (SET_DEST (set))
4998 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4999 <= BITS_PER_WORD)
5000 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
5001 p->equiv_reg_src
5002 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5003 plus_constant (p->sp_equiv_reg,
5004 p->sp_offset));
5007 /* Otherwise, replace any references to SP in the insn with its new value
5008 and emit the insn. */
5009 else
5011 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5012 plus_constant (p->sp_equiv_reg,
5013 p->sp_offset));
5014 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5015 plus_constant (p->sp_equiv_reg,
5016 p->sp_offset));
5017 emit_insn (set);
5021 /* Update the tracking information for registers set to constants. */
5023 static void
5024 update_epilogue_consts (rtx dest, rtx x, void *data)
5026 struct epi_info *p = (struct epi_info *) data;
5027 rtx new;
5029 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5030 return;
5032 /* If we are either clobbering a register or doing a partial set,
5033 show we don't know the value. */
5034 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5035 p->const_equiv[REGNO (dest)] = 0;
5037 /* If we are setting it to a constant, record that constant. */
5038 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5039 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5041 /* If this is a binary operation between a register we have been tracking
5042 and a constant, see if we can compute a new constant value. */
5043 else if (ARITHMETIC_P (SET_SRC (x))
5044 && REG_P (XEXP (SET_SRC (x), 0))
5045 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5046 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5047 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5048 && 0 != (new = simplify_binary_operation
5049 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5050 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5051 XEXP (SET_SRC (x), 1)))
5052 && GET_CODE (new) == CONST_INT)
5053 p->const_equiv[REGNO (dest)] = new;
5055 /* Otherwise, we can't do anything with this value. */
5056 else
5057 p->const_equiv[REGNO (dest)] = 0;
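/* Hypothetical example (added commentary, not from the original sources):
   after an epilogue insn such as (set (reg r1) (const_int 16)), the code
   above records const_equiv[r1] = 16.  A later
   (set (reg sp) (plus (reg sp) (reg r1))) can then be handled by
   handle_epilogue_set as a pure SP adjustment of +16, even though the
   addend is a register rather than a CONST_INT.  */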
5060 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5062 static void
5063 emit_equiv_load (struct epi_info *p)
5065 if (p->equiv_reg_src != 0)
5067 rtx dest = p->sp_equiv_reg;
5069 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5070 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5071 REGNO (p->sp_equiv_reg));
5073 emit_move_insn (dest, p->equiv_reg_src);
5074 p->equiv_reg_src = 0;
5077 #endif
5079 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5080 this into place with notes indicating where the prologue ends and where
5081 the epilogue begins. Update the basic block information when possible. */
5083 void
5084 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5086 int inserted = 0;
5087 edge e;
5088 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5089 rtx seq;
5090 #endif
5091 #ifdef HAVE_prologue
5092 rtx prologue_end = NULL_RTX;
5093 #endif
5094 #if defined (HAVE_epilogue) || defined(HAVE_return)
5095 rtx epilogue_end = NULL_RTX;
5096 #endif
5097 edge_iterator ei;
5099 #ifdef HAVE_prologue
5100 if (HAVE_prologue)
5102 start_sequence ();
5103 seq = gen_prologue ();
5104 emit_insn (seq);
5106 /* Retain a map of the prologue insns. */
5107 record_insns (seq, &prologue);
5108 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5110 seq = get_insns ();
5111 end_sequence ();
5112 set_insn_locators (seq, prologue_locator);
5114 /* Can't deal with multiple successors of the entry block
5115 at the moment. Function should always have at least one
5116 entry point. */
5117 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5119 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5120 inserted = 1;
5122 #endif
5124 /* If the exit block has no non-fake predecessors, we don't need
5125 an epilogue. */
5126 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5127 if ((e->flags & EDGE_FAKE) == 0)
5128 break;
5129 if (e == NULL)
5130 goto epilogue_done;
5132 #ifdef HAVE_return
5133 if (optimize && HAVE_return)
5135 /* If we're allowed to generate a simple return instruction,
5136 then by definition we don't need a full epilogue. Examine
5137 the block that falls through to EXIT. If it does not
5138 contain any code, examine its predecessors and try to
5139 emit (conditional) return instructions. */
5141 basic_block last;
5142 rtx label;
5144 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5145 if (e->flags & EDGE_FALLTHRU)
5146 break;
5147 if (e == NULL)
5148 goto epilogue_done;
5149 last = e->src;
5151 /* Verify that there are no active instructions in the last block. */
5152 label = BB_END (last);
5153 while (label && !LABEL_P (label))
5155 if (active_insn_p (label))
5156 break;
5157 label = PREV_INSN (label);
5160 if (BB_HEAD (last) == label && LABEL_P (label))
5162 edge_iterator ei2;
5163 rtx epilogue_line_note = NULL_RTX;
5165 /* Locate the line number associated with the closing brace,
5166 if we can find one. */
5167 for (seq = get_last_insn ();
5168 seq && ! active_insn_p (seq);
5169 seq = PREV_INSN (seq))
5170 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5172 epilogue_line_note = seq;
5173 break;
5176 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5178 basic_block bb = e->src;
5179 rtx jump;
5181 if (bb == ENTRY_BLOCK_PTR)
5183 ei_next (&ei2);
5184 continue;
5187 jump = BB_END (bb);
5188 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5190 ei_next (&ei2);
5191 continue;
5194 /* If we have an unconditional jump, we can replace that
5195 with a simple return instruction. */
5196 if (simplejump_p (jump))
5198 emit_return_into_block (bb, epilogue_line_note);
5199 delete_insn (jump);
5202 /* If we have a conditional jump, we can try to replace
5203 that with a conditional return instruction. */
5204 else if (condjump_p (jump))
5206 if (! redirect_jump (jump, 0, 0))
5208 ei_next (&ei2);
5209 continue;
5212 /* If this block has only one successor, it both jumps
5213 and falls through to the fallthru block, so we can't
5214 delete the edge. */
5215 if (single_succ_p (bb))
5217 ei_next (&ei2);
5218 continue;
5221 else
5223 ei_next (&ei2);
5224 continue;
5227 /* Fix up the CFG for the successful change we just made. */
5228 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5231 /* Emit a return insn for the exit fallthru block. Whether
5232 this is still reachable will be determined later. */
5234 emit_barrier_after (BB_END (last));
5235 emit_return_into_block (last, epilogue_line_note);
5236 epilogue_end = BB_END (last);
5237 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5238 goto epilogue_done;
5241 #endif
5242 /* Find the edge that falls through to EXIT. Other edges may exist
5243 due to RETURN instructions, but those don't need epilogues.
5244 There really shouldn't be a mixture -- either all should have
5245 been converted or none, however... */
5247 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5248 if (e->flags & EDGE_FALLTHRU)
5249 break;
5250 if (e == NULL)
5251 goto epilogue_done;
5253 #ifdef HAVE_epilogue
5254 if (HAVE_epilogue)
5256 start_sequence ();
5257 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5259 seq = gen_epilogue ();
5261 #ifdef INCOMING_RETURN_ADDR_RTX
5262 /* If this function returns with the stack depressed and we can support
5263 it, massage the epilogue to actually do that. */
5264 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5265 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5266 seq = keep_stack_depressed (seq);
5267 #endif
5269 emit_jump_insn (seq);
5271 /* Retain a map of the epilogue insns. */
5272 record_insns (seq, &epilogue);
5273 set_insn_locators (seq, epilogue_locator);
5275 seq = get_insns ();
5276 end_sequence ();
5278 insert_insn_on_edge (seq, e);
5279 inserted = 1;
5281 else
5282 #endif
5284 basic_block cur_bb;
5286 if (! next_active_insn (BB_END (e->src)))
5287 goto epilogue_done;
5288 /* We have a fall-through edge to the exit block, the source is not
5289 at the end of the function, and there will be an assembler epilogue
5290 at the end of the function.
5291 We can't use force_nonfallthru here, because that would try to
5292 use return. Inserting a jump 'by hand' is extremely messy, so
5293 we take advantage of cfg_layout_finalize using
5294 fixup_fallthru_exit_predecessor. */
5295 cfg_layout_initialize (0);
5296 FOR_EACH_BB (cur_bb)
5297 if (cur_bb->index >= NUM_FIXED_BLOCKS
5298 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5299 cur_bb->aux = cur_bb->next_bb;
5300 cfg_layout_finalize ();
5302 epilogue_done:
5304 if (inserted)
5305 commit_edge_insertions ();
5307 #ifdef HAVE_sibcall_epilogue
5308 /* Emit sibling epilogues before any sibling call sites. */
5309 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5311 basic_block bb = e->src;
5312 rtx insn = BB_END (bb);
5314 if (!CALL_P (insn)
5315 || ! SIBLING_CALL_P (insn))
5317 ei_next (&ei);
5318 continue;
5321 start_sequence ();
5322 emit_insn (gen_sibcall_epilogue ());
5323 seq = get_insns ();
5324 end_sequence ();
5326 /* Retain a map of the epilogue insns. Used in life analysis to
5327 avoid getting rid of sibcall epilogue insns. Do this before we
5328 actually emit the sequence. */
5329 record_insns (seq, &sibcall_epilogue);
5330 set_insn_locators (seq, epilogue_locator);
5332 emit_insn_before (seq, insn);
5333 ei_next (&ei);
5335 #endif
5337 #ifdef HAVE_prologue
5338 /* This is probably all useless now that we use locators. */
5339 if (prologue_end)
5341 rtx insn, prev;
5343 /* GDB handles `break f' by setting a breakpoint on the first
5344 line note after the prologue. Which means (1) that if
5345 there are line number notes before where we inserted the
5346 prologue we should move them, and (2) we should generate a
5347 note before the end of the first basic block, if there isn't
5348 one already there.
5350 ??? This behavior is completely broken when dealing with
5351 multiple entry functions. We simply always place the note into
5352 the first basic block and let alternate entry points
5353 be missed. */
5356 for (insn = prologue_end; insn; insn = prev)
5358 prev = PREV_INSN (insn);
5359 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5361 /* Note that we cannot reorder the first insn in the
5362 chain, since rest_of_compilation relies on that
5363 remaining constant. */
5364 if (prev == NULL)
5365 break;
5366 reorder_insns (insn, insn, prologue_end);
5370 /* Find the last line number note in the first block. */
5371 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5372 insn != prologue_end && insn;
5373 insn = PREV_INSN (insn))
5374 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5375 break;
5377 /* If we didn't find one, make a copy of the first line number
5378 we run across. */
5379 if (! insn)
5381 for (insn = next_active_insn (prologue_end);
5382 insn;
5383 insn = PREV_INSN (insn))
5384 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5386 emit_note_copy_after (insn, prologue_end);
5387 break;
5391 #endif
5392 #ifdef HAVE_epilogue
5393 if (epilogue_end)
5395 rtx insn, next;
5397 /* Similarly, move any line notes that appear after the epilogue.
5398 There is no need, however, to be quite so anal about the existence
5399 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5400 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5401 info generation. */
5402 for (insn = epilogue_end; insn; insn = next)
5404 next = NEXT_INSN (insn);
5405 if (NOTE_P (insn)
5406 && (NOTE_LINE_NUMBER (insn) > 0
5407 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5408 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5409 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5412 #endif
5415 /* Reposition the prologue-end and epilogue-begin notes after instruction
5416 scheduling and delayed branch scheduling. */
5418 void
5419 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5421 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5422 rtx insn, last, note;
5423 int len;
5425 if ((len = VEC_length (int, prologue)) > 0)
5427 last = 0, note = 0;
5429 /* Scan from the beginning until we reach the last prologue insn.
5430 We apparently can't depend on basic_block_{head,end} after
5431 reorg has run. */
5432 for (insn = f; insn; insn = NEXT_INSN (insn))
5434 if (NOTE_P (insn))
5436 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5437 note = insn;
5439 else if (contains (insn, &prologue))
5441 last = insn;
5442 if (--len == 0)
5443 break;
5447 if (last)
5449 /* Find the prologue-end note if we haven't already, and
5450 move it to just after the last prologue insn. */
5451 if (note == 0)
5453 for (note = last; (note = NEXT_INSN (note));)
5454 if (NOTE_P (note)
5455 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5456 break;
5459 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5460 if (LABEL_P (last))
5461 last = NEXT_INSN (last);
5462 reorder_insns (note, note, last);
5466 if ((len = VEC_length (int, epilogue)) > 0)
5468 last = 0, note = 0;
5470 /* Scan from the end until we reach the first epilogue insn.
5471 We apparently can't depend on basic_block_{head,end} after
5472 reorg has run. */
5473 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5475 if (NOTE_P (insn))
5477 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5478 note = insn;
5480 else if (contains (insn, &epilogue))
5482 last = insn;
5483 if (--len == 0)
5484 break;
5488 if (last)
5490 /* Find the epilogue-begin note if we haven't already, and
5491 move it to just before the first epilogue insn. */
5492 if (note == 0)
5494 for (note = insn; (note = PREV_INSN (note));)
5495 if (NOTE_P (note)
5496 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5497 break;
5500 if (PREV_INSN (last) != note)
5501 reorder_insns (note, note, PREV_INSN (last));
5504 #endif /* HAVE_prologue or HAVE_epilogue */
5507 /* Resets the cfun->ib_boundaries_block array. */
5509 void
5510 reset_block_changes (void)
5512 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5513 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5516 /* Record the boundary for BLOCK. */
5517 void
5518 record_block_change (tree block)
5520 int i, n;
5521 tree last_block;
5523 if (!block)
5524 return;
5526 if (!cfun->ib_boundaries_block)
5527 return;
5529 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5530 VARRAY_POP (cfun->ib_boundaries_block);
5531 n = get_max_uid ();
5532 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5533 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5535 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5538 /* Finishes the record of block boundaries. */
5539 void finalize_block_changes (void)
5541 record_block_change (DECL_INITIAL (current_function_decl));
5544 /* For INSN return the BLOCK it belongs to. */
5545 void
5546 check_block_change (rtx insn, tree *block)
5548 unsigned uid = INSN_UID (insn);
5550 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5551 return;
5553 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
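/* Hypothetical example (added commentary, not from the original sources):
   if insns with UIDs 10..14 were emitted while lexical block A was
   current and block B was recorded afterwards, the varray slots for
   those UIDs hold A, so a later check_block_change on any insn with one
   of those UIDs retrieves A.  */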
5556 /* Releases the ib_boundaries_block records. */
5557 void
5558 free_block_changes (void)
5560 cfun->ib_boundaries_block = NULL;
5563 /* Returns the name of the current function. */
5564 const char *
5565 current_function_name (void)
5567 return lang_hooks.decl_printable_name (cfun->decl, 2);
5571 static void
5572 rest_of_handle_check_leaf_regs (void)
5574 #ifdef LEAF_REGISTERS
5575 current_function_uses_only_leaf_regs
5576 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5577 #endif
5580 struct tree_opt_pass pass_leaf_regs =
5582 NULL, /* name */
5583 NULL, /* gate */
5584 rest_of_handle_check_leaf_regs, /* execute */
5585 NULL, /* sub */
5586 NULL, /* next */
5587 0, /* static_pass_number */
5588 0, /* tv_id */
5589 0, /* properties_required */
5590 0, /* properties_provided */
5591 0, /* properties_destroyed */
5592 0, /* todo_flags_start */
5593 0, /* todo_flags_finish */
5594 0 /* letter */
5598 #include "gt-function.h"