gcc/function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "libfuncs.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "basic-block.h"
55 #include "toplev.h"
56 #include "hashtab.h"
57 #include "ggc.h"
58 #include "tm_p.h"
59 #include "integrate.h"
60 #include "langhooks.h"
61 #include "target.h"
62 #include "cfglayout.h"
63 #include "tree-gimple.h"
64 #include "tree-pass.h"
65 #include "predict.h"
67 #ifndef LOCAL_ALIGNMENT
68 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
69 #endif
71 #ifndef STACK_ALIGNMENT_NEEDED
72 #define STACK_ALIGNMENT_NEEDED 1
73 #endif
75 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
77 /* Some systems use __main in a way incompatible with its use in gcc; in these
78 cases, use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
85 /* Round a value down to the largest multiple of the required alignment
86 that does not exceed it. Avoid using division in case the value is
87 negative. Assume the alignment is a power of two. */
88 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
90 /* Similar, but round up to the next multiple of the
91 alignment. */
92 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
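/* Worked example (illustrative only): with ALIGN == 8,
   FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16, while
   FLOOR_ROUND (-13, 8) == -16.  The masking form rounds toward negative
   infinity even for negative offsets, which integer division (which
   truncates toward zero) would not.  */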
94 /* Nonzero if function being compiled doesn't contain any calls
95 (ignoring the prologue and epilogue). This is set prior to
96 local register allocation and is valid for the remaining
97 compiler passes. */
98 int current_function_is_leaf;
100 /* Nonzero if function being compiled doesn't modify the stack pointer
101 (ignoring the prologue and epilogue). This is only valid after
102 life_analysis has run. */
103 int current_function_sp_is_unchanging;
105 /* Nonzero if the function being compiled is a leaf function which only
106 uses leaf registers. This is valid after reload (specifically after
107 sched2) and is useful only if the port defines LEAF_REGISTERS. */
108 int current_function_uses_only_leaf_regs;
110 /* Nonzero once virtual register instantiation has been done.
111 assign_stack_local uses frame_pointer_rtx when this is nonzero.
112 calls.c:emit_library_call_value_1 uses it to set up
113 post-instantiation libcalls. */
114 int virtuals_instantiated;
116 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
117 static GTY(()) int funcdef_no;
119 /* These variables hold pointers to functions to create and destroy
120 target specific, per-function data structures. */
121 struct machine_function * (*init_machine_status) (void);
123 /* The currently compiled function. */
124 struct function *cfun = 0;
126 DEF_VEC_I(int);
127 DEF_VEC_ALLOC_I(int,heap);
129 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
130 static VEC(int,heap) *prologue;
131 static VEC(int,heap) *epilogue;
133 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
134 in this function. */
135 static VEC(int,heap) *sibcall_epilogue;
137 /* In order to evaluate some expressions, such as function calls returning
138 structures in memory, we need to temporarily allocate stack locations.
139 We record each allocated temporary in the following structure.
141 Associated with each temporary slot is a nesting level. When we pop up
142 one level, all temporaries associated with the previous level are freed.
143 Normally, all temporaries are freed after the execution of the statement
144 in which they were created. However, if we are inside a ({...}) grouping,
145 the result may be in a temporary and hence must be preserved. If the
146 result could be in a temporary, we preserve it if we can determine which
147 one it is in. If we cannot determine which temporary may contain the
148 result, all temporaries are preserved. A temporary is preserved by
149 pretending it was allocated at the previous nesting level.
151 Automatic variables are also assigned temporary slots, at the nesting
152 level where they are defined. They are marked as "kept" so that
153 free_temp_slots will not free them. */
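/* A minimal usage sketch (illustrative only, not part of this file):
   callers typically bracket RTL generation for a statement with

       push_temp_slots ();
       slot = assign_stack_temp (mode, size, 0);
       ... emit code that uses SLOT ...
       preserve_temp_slots (result);   -- only if RESULT may sit in a temp
       free_temp_slots ();
       pop_temp_slots ();

   so that temporaries are recycled once the statement has been expanded.  */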
155 struct temp_slot GTY(())
157 /* Points to next temporary slot. */
158 struct temp_slot *next;
159 /* Points to previous temporary slot. */
160 struct temp_slot *prev;
162 /* The rtx used to reference the slot. */
163 rtx slot;
164 /* The rtx used to represent the address if not the address of the
165 slot above. May be an EXPR_LIST if multiple addresses exist. */
166 rtx address;
167 /* The alignment (in bits) of the slot. */
168 unsigned int align;
169 /* The size, in units, of the slot. */
170 HOST_WIDE_INT size;
171 /* The type of the object in the slot, or zero if it doesn't correspond
172 to a type. We use this to determine whether a slot can be reused.
173 It can be reused if objects of the type of the new slot will always
174 conflict with objects of the type of the old slot. */
175 tree type;
176 /* Nonzero if this temporary is currently in use. */
177 char in_use;
178 /* Nonzero if this temporary has its address taken. */
179 char addr_taken;
180 /* Nesting level at which this slot is being used. */
181 int level;
182 /* Nonzero if this should survive a call to free_temp_slots. */
183 int keep;
184 /* The offset of the slot from the frame_pointer, including extra space
185 for alignment. This info is for combine_temp_slots. */
186 HOST_WIDE_INT base_offset;
187 /* The size of the slot, including extra space for alignment. This
188 info is for combine_temp_slots. */
189 HOST_WIDE_INT full_size;
192 /* Forward declarations. */
194 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
195 struct function *);
196 static struct temp_slot *find_temp_slot_from_address (rtx);
197 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
198 static void pad_below (struct args_size *, enum machine_mode, tree);
199 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
200 static void reorder_fix_fragments (tree);
201 static int all_blocks (tree, tree *);
202 static tree *get_block_vector (tree, int *);
203 extern tree debug_find_var_in_block_tree (tree, tree);
204 /* We always define `record_insns' even if it's not used so that we
205 can always export `prologue_epilogue_contains'. */
206 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
207 static int contains (rtx, VEC(int,heap) **);
208 #ifdef HAVE_return
209 static void emit_return_into_block (basic_block, rtx);
210 #endif
211 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
212 static rtx keep_stack_depressed (rtx);
213 #endif
214 static void prepare_function_start (tree);
215 static void do_clobber_return_reg (rtx, void *);
216 static void do_use_return_reg (rtx, void *);
217 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
219 /* Pointer to chain of `struct function' for containing functions. */
220 struct function *outer_function_chain;
222 /* Given a function decl for a containing function,
223 return the `struct function' for it. */
225 struct function *
226 find_function_data (tree decl)
228 struct function *p;
230 for (p = outer_function_chain; p; p = p->outer)
231 if (p->decl == decl)
232 return p;
234 gcc_unreachable ();
237 /* Save the current context for compilation of a nested function.
238 This is called from language-specific code. The caller should use
239 the enter_nested langhook to save any language-specific state,
240 since this function knows only about language-independent
241 variables. */
243 void
244 push_function_context_to (tree context ATTRIBUTE_UNUSED)
246 struct function *p;
248 if (cfun == 0)
249 init_dummy_function_start ();
250 p = cfun;
252 p->outer = outer_function_chain;
253 outer_function_chain = p;
255 lang_hooks.function.enter_nested (p);
257 cfun = 0;
260 void
261 push_function_context (void)
263 push_function_context_to (current_function_decl);
266 /* Restore the last saved context, at the end of a nested function.
267 This function is called from language-specific code. */
269 void
270 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
272 struct function *p = outer_function_chain;
274 cfun = p;
275 outer_function_chain = p->outer;
277 current_function_decl = p->decl;
279 lang_hooks.function.leave_nested (p);
281 /* Reset variables that have known state during rtx generation. */
282 virtuals_instantiated = 0;
283 generating_concat_p = 1;
286 void
287 pop_function_context (void)
289 pop_function_context_from (current_function_decl);
292 /* Clear out all parts of the state in F that can safely be discarded
293 after the function has been parsed, but not compiled, to let
294 garbage collection reclaim the memory. */
296 void
297 free_after_parsing (struct function *f)
299 /* f->expr->forced_labels is used by code generation. */
300 /* f->emit->regno_reg_rtx is used by code generation. */
301 /* f->varasm is used by code generation. */
302 /* f->eh->eh_return_stub_label is used by code generation. */
304 lang_hooks.function.final (f);
307 /* Clear out all parts of the state in F that can safely be discarded
308 after the function has been compiled, to let garbage collection
309 reclaim the memory. */
311 void
312 free_after_compilation (struct function *f)
314 VEC_free (int, heap, prologue);
315 VEC_free (int, heap, epilogue);
316 VEC_free (int, heap, sibcall_epilogue);
318 f->eh = NULL;
319 f->expr = NULL;
320 f->emit = NULL;
321 f->varasm = NULL;
322 f->machine = NULL;
323 f->cfg = NULL;
325 f->x_avail_temp_slots = NULL;
326 f->x_used_temp_slots = NULL;
327 f->arg_offset_rtx = NULL;
328 f->return_rtx = NULL;
329 f->internal_arg_pointer = NULL;
330 f->x_nonlocal_goto_handler_labels = NULL;
331 f->x_return_label = NULL;
332 f->x_naked_return_label = NULL;
333 f->x_stack_slot_list = NULL;
334 f->x_tail_recursion_reentry = NULL;
335 f->x_arg_pointer_save_area = NULL;
336 f->x_parm_birth_insn = NULL;
337 f->original_arg_vector = NULL;
338 f->original_decl_initial = NULL;
339 f->epilogue_delay_list = NULL;
342 /* Allocate fixed slots in the stack frame of the current function. */
344 /* Return size needed for stack frame based on slots so far allocated in
345 function F.
346 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
347 the caller may have to do that. */
349 static HOST_WIDE_INT
350 get_func_frame_size (struct function *f)
352 if (FRAME_GROWS_DOWNWARD)
353 return -f->x_frame_offset;
354 else
355 return f->x_frame_offset;
358 /* Return size needed for stack frame based on slots so far allocated.
359 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
360 the caller may have to do that. */
361 HOST_WIDE_INT
362 get_frame_size (void)
364 return get_func_frame_size (cfun);
367 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
368 with machine mode MODE.
370 ALIGN controls the amount of alignment for the address of the slot:
371 0 means according to MODE,
372 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
373 -2 means use BITS_PER_UNIT,
374 positive specifies alignment boundary in bits.
376 We do not round to stack_boundary here.
378 FUNCTION specifies the function to allocate in. */
380 static rtx
381 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
382 struct function *function)
384 rtx x, addr;
385 int bigend_correction = 0;
386 unsigned int alignment;
387 int frame_off, frame_alignment, frame_phase;
389 if (align == 0)
391 tree type;
393 if (mode == BLKmode)
394 alignment = BIGGEST_ALIGNMENT;
395 else
396 alignment = GET_MODE_ALIGNMENT (mode);
398 /* Allow the target to (possibly) increase the alignment of this
399 stack slot. */
400 type = lang_hooks.types.type_for_mode (mode, 0);
401 if (type)
402 alignment = LOCAL_ALIGNMENT (type, alignment);
404 alignment /= BITS_PER_UNIT;
406 else if (align == -1)
408 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
409 size = CEIL_ROUND (size, alignment);
411 else if (align == -2)
412 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
413 else
414 alignment = align / BITS_PER_UNIT;
416 if (FRAME_GROWS_DOWNWARD)
417 function->x_frame_offset -= size;
419 /* Ignore alignment requests we cannot satisfy within the preferred stack boundary. */
420 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
421 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
423 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
424 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
426 /* Calculate how many bytes the start of local variables is off from
427 stack alignment. */
428 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
429 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
430 frame_phase = frame_off ? frame_alignment - frame_off : 0;
432 /* Round the frame offset to the specified alignment. The default is
433 to always honor requests to align the stack but a port may choose to
434 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
435 if (STACK_ALIGNMENT_NEEDED
436 || mode != BLKmode
437 || size != 0)
439 /* We must be careful here, since FRAME_OFFSET might be negative and
440 division with a negative dividend isn't as well defined as we might
441 like. So we instead assume that ALIGNMENT is a power of two and
442 use logical operations which are unambiguous. */
443 if (FRAME_GROWS_DOWNWARD)
444 function->x_frame_offset
445 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
446 (unsigned HOST_WIDE_INT) alignment)
447 + frame_phase);
448 else
449 function->x_frame_offset
450 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
451 (unsigned HOST_WIDE_INT) alignment)
452 + frame_phase);
455 /* On a big-endian machine, if we are allocating more space than we will use,
456 use the least significant bytes of those that are allocated. */
457 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
458 bigend_correction = size - GET_MODE_SIZE (mode);
460 /* If we have already instantiated virtual registers, return the actual
461 address relative to the frame pointer. */
462 if (function == cfun && virtuals_instantiated)
463 addr = plus_constant (frame_pointer_rtx,
464 trunc_int_for_mode
465 (frame_offset + bigend_correction
466 + STARTING_FRAME_OFFSET, Pmode));
467 else
468 addr = plus_constant (virtual_stack_vars_rtx,
469 trunc_int_for_mode
470 (function->x_frame_offset + bigend_correction,
471 Pmode));
473 if (!FRAME_GROWS_DOWNWARD)
474 function->x_frame_offset += size;
476 x = gen_rtx_MEM (mode, addr);
477 MEM_NOTRAP_P (x) = 1;
479 function->x_stack_slot_list
480 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
482 /* Try to detect frame size overflows on native platforms. */
483 #if BITS_PER_WORD >= 32
484 if ((FRAME_GROWS_DOWNWARD
485 ? (unsigned HOST_WIDE_INT) -function->x_frame_offset
486 : (unsigned HOST_WIDE_INT) function->x_frame_offset)
487 > ((unsigned HOST_WIDE_INT) 1 << (BITS_PER_WORD - 1))
488 /* Leave room for the fixed part of the frame. */
489 - 64 * UNITS_PER_WORD)
491 error ("%Jtotal size of local objects too large", function->decl);
492 /* Avoid duplicate error messages as much as possible. */
493 function->x_frame_offset = 0;
495 #endif
497 return x;
500 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
501 current function. */
504 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
506 return assign_stack_local_1 (mode, size, align, cfun);
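/* Illustrative only: a typical caller asks for a slot sized and aligned
   according to its mode, e.g.

       rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   where ALIGN == 0 lets MODE (and LOCAL_ALIGNMENT) determine the alignment.  */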
510 /* Removes temporary slot TEMP from LIST. */
512 static void
513 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
515 if (temp->next)
516 temp->next->prev = temp->prev;
517 if (temp->prev)
518 temp->prev->next = temp->next;
519 else
520 *list = temp->next;
522 temp->prev = temp->next = NULL;
525 /* Inserts temporary slot TEMP into LIST. */
527 static void
528 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
530 temp->next = *list;
531 if (*list)
532 (*list)->prev = temp;
533 temp->prev = NULL;
534 *list = temp;
537 /* Returns the list of used temp slots at LEVEL. */
539 static struct temp_slot **
540 temp_slots_at_level (int level)
543 if (!used_temp_slots)
544 VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
546 while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
547 VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
549 return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
552 /* Returns the maximal temporary slot level. */
554 static int
555 max_slot_level (void)
557 if (!used_temp_slots)
558 return -1;
560 return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
563 /* Moves temporary slot TEMP to LEVEL. */
565 static void
566 move_slot_to_level (struct temp_slot *temp, int level)
568 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
569 insert_slot_to_list (temp, temp_slots_at_level (level));
570 temp->level = level;
573 /* Make temporary slot TEMP available. */
575 static void
576 make_slot_available (struct temp_slot *temp)
578 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
579 insert_slot_to_list (temp, &avail_temp_slots);
580 temp->in_use = 0;
581 temp->level = -1;
584 /* Allocate a temporary stack slot and record it for possible later
585 reuse.
587 MODE is the machine mode to be given to the returned rtx.
589 SIZE is the size in units of the space required. We do no rounding here
590 since assign_stack_local will do any required rounding.
592 KEEP is 1 if this slot is to be retained after a call to
593 free_temp_slots. Automatic variables for a block are allocated
594 with this flag. KEEP values of 2 or 3 were needed respectively
595 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
596 or for SAVE_EXPRs, but they are now unused.
598 TYPE is the type that will be used for the stack slot. */
601 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
602 int keep, tree type)
604 unsigned int align;
605 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
606 rtx slot;
608 /* If SIZE is -1 it means that somebody tried to allocate a temporary
609 of a variable size. */
610 gcc_assert (size != -1);
612 /* These are now unused. */
613 gcc_assert (keep <= 1);
615 if (mode == BLKmode)
616 align = BIGGEST_ALIGNMENT;
617 else
618 align = GET_MODE_ALIGNMENT (mode);
620 if (! type)
621 type = lang_hooks.types.type_for_mode (mode, 0);
623 if (type)
624 align = LOCAL_ALIGNMENT (type, align);
626 /* Try to find an available, already-allocated temporary of the proper
627 mode which meets the size and alignment requirements. Choose the
628 smallest one with the closest alignment.
630 If assign_stack_temp is called outside of the tree->rtl expansion,
631 we cannot reuse the stack slots (that may still refer to
632 VIRTUAL_STACK_VARS_REGNUM). */
633 if (!virtuals_instantiated)
635 for (p = avail_temp_slots; p; p = p->next)
637 if (p->align >= align && p->size >= size
638 && GET_MODE (p->slot) == mode
639 && objects_must_conflict_p (p->type, type)
640 && (best_p == 0 || best_p->size > p->size
641 || (best_p->size == p->size && best_p->align > p->align)))
643 if (p->align == align && p->size == size)
645 selected = p;
646 cut_slot_from_list (selected, &avail_temp_slots);
647 best_p = 0;
648 break;
650 best_p = p;
655 /* Make our best, if any, the one to use. */
656 if (best_p)
658 selected = best_p;
659 cut_slot_from_list (selected, &avail_temp_slots);
661 /* If there are enough aligned bytes left over, make them into a new
662 temp_slot so that the extra bytes don't get wasted. Do this only
663 for BLKmode slots, so that we can be sure of the alignment. */
664 if (GET_MODE (best_p->slot) == BLKmode)
666 int alignment = best_p->align / BITS_PER_UNIT;
667 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
669 if (best_p->size - rounded_size >= alignment)
671 p = ggc_alloc (sizeof (struct temp_slot));
672 p->in_use = p->addr_taken = 0;
673 p->size = best_p->size - rounded_size;
674 p->base_offset = best_p->base_offset + rounded_size;
675 p->full_size = best_p->full_size - rounded_size;
676 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
677 p->align = best_p->align;
678 p->address = 0;
679 p->type = best_p->type;
680 insert_slot_to_list (p, &avail_temp_slots);
682 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
683 stack_slot_list);
685 best_p->size = rounded_size;
686 best_p->full_size = rounded_size;
691 /* If we still didn't find one, make a new temporary. */
692 if (selected == 0)
694 HOST_WIDE_INT frame_offset_old = frame_offset;
696 p = ggc_alloc (sizeof (struct temp_slot));
698 /* We are passing an explicit alignment request to assign_stack_local.
699 One side effect of that is assign_stack_local will not round SIZE
700 to ensure the frame offset remains suitably aligned.
702 So for requests which depended on the rounding of SIZE, we go ahead
703 and round it now. We also make sure ALIGNMENT is at least
704 BIGGEST_ALIGNMENT. */
705 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
706 p->slot = assign_stack_local (mode,
707 (mode == BLKmode
708 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
709 : size),
710 align);
712 p->align = align;
714 /* The following slot size computation is necessary because we don't
715 know the actual size of the temporary slot until assign_stack_local
716 has performed all the frame alignment and size rounding for the
717 requested temporary. Note that extra space added for alignment
718 can be either above or below this stack slot depending on which
719 way the frame grows. We include the extra space if and only if it
720 is above this slot. */
721 if (FRAME_GROWS_DOWNWARD)
722 p->size = frame_offset_old - frame_offset;
723 else
724 p->size = size;
726 /* Now define the fields used by combine_temp_slots. */
727 if (FRAME_GROWS_DOWNWARD)
729 p->base_offset = frame_offset;
730 p->full_size = frame_offset_old - frame_offset;
732 else
734 p->base_offset = frame_offset_old;
735 p->full_size = frame_offset - frame_offset_old;
737 p->address = 0;
739 selected = p;
742 p = selected;
743 p->in_use = 1;
744 p->addr_taken = 0;
745 p->type = type;
746 p->level = temp_slot_level;
747 p->keep = keep;
749 pp = temp_slots_at_level (p->level);
750 insert_slot_to_list (p, pp);
752 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
753 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
754 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
756 /* If we know the alias set for the memory that will be used, use
757 it. If there's no TYPE, then we don't know anything about the
758 alias set for the memory. */
759 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
760 set_mem_align (slot, align);
762 /* If a type is specified, set the relevant flags. */
763 if (type != 0)
765 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
766 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
768 MEM_NOTRAP_P (slot) = 1;
770 return slot;
773 /* Allocate a temporary stack slot and record it for possible later
774 reuse. First three arguments are same as in preceding function. */
777 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
779 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
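/* Illustrative only: callers that do not track a tree type simply write

       rtx mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);

   and let assign_stack_temp_for_type choose the alignment and reuse a
   free slot of a compatible mode and size when one is available.  */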
782 /* Assign a temporary.
783 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
784 and so that should be used in error messages. In either case, we
785 allocate a temporary of the given type.
786 KEEP is as for assign_stack_temp.
787 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
788 it is 0 if a register is OK.
789 DONT_PROMOTE is 1 if we should not promote values in register
790 to wider modes. */
793 assign_temp (tree type_or_decl, int keep, int memory_required,
794 int dont_promote ATTRIBUTE_UNUSED)
796 tree type, decl;
797 enum machine_mode mode;
798 #ifdef PROMOTE_MODE
799 int unsignedp;
800 #endif
802 if (DECL_P (type_or_decl))
803 decl = type_or_decl, type = TREE_TYPE (decl);
804 else
805 decl = NULL, type = type_or_decl;
807 mode = TYPE_MODE (type);
808 #ifdef PROMOTE_MODE
809 unsignedp = TYPE_UNSIGNED (type);
810 #endif
812 if (mode == BLKmode || memory_required)
814 HOST_WIDE_INT size = int_size_in_bytes (type);
815 tree size_tree;
816 rtx tmp;
818 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
819 problems with allocating the stack space. */
820 if (size == 0)
821 size = 1;
823 /* Unfortunately, we don't yet know how to allocate variable-sized
824 temporaries. However, sometimes we have a fixed upper limit on
825 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
826 instead. This is the case for Chill variable-sized strings. */
827 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
828 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
829 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
830 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
832 /* If we still haven't been able to get a size, see if the language
833 can compute a maximum size. */
834 if (size == -1
835 && (size_tree = lang_hooks.types.max_size (type)) != 0
836 && host_integerp (size_tree, 1))
837 size = tree_low_cst (size_tree, 1);
839 /* The size of the temporary may be too large to fit into an integer. */
840 /* ??? Not sure this should happen except for user silliness, so limit
841 this to things that aren't compiler-generated temporaries. The
842 rest of the time we'll die in assign_stack_temp_for_type. */
843 if (decl && size == -1
844 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
846 error ("size of variable %q+D is too large", decl);
847 size = 1;
850 tmp = assign_stack_temp_for_type (mode, size, keep, type);
851 return tmp;
854 #ifdef PROMOTE_MODE
855 if (! dont_promote)
856 mode = promote_mode (type, mode, &unsignedp, 0);
857 #endif
859 return gen_reg_rtx (mode);
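/* Illustrative only: an expander needing a scratch object of tree type
   TYPE might write

       rtx tmp = assign_temp (type, 0, 1, 0);

   which returns addressable stack memory because MEMORY_REQUIRED is 1;
   with MEMORY_REQUIRED == 0 a scalar type may instead get a pseudo.  */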
862 /* Combine temporary stack slots which are adjacent on the stack.
864 This allows for better use of already allocated stack space. This is only
865 done for BLKmode slots because we can be sure that we won't have alignment
866 problems in this case. */
868 static void
869 combine_temp_slots (void)
871 struct temp_slot *p, *q, *next, *next_q;
872 int num_slots;
874 /* We can't combine slots, because the information about which slot
875 is in which alias set will be lost. */
876 if (flag_strict_aliasing)
877 return;
879 /* If there are a lot of temp slots, don't do anything unless we are
880 optimizing heavily. */
881 if (! flag_expensive_optimizations)
882 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
883 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
884 return;
886 for (p = avail_temp_slots; p; p = next)
888 int delete_p = 0;
890 next = p->next;
892 if (GET_MODE (p->slot) != BLKmode)
893 continue;
895 for (q = p->next; q; q = next_q)
897 int delete_q = 0;
899 next_q = q->next;
901 if (GET_MODE (q->slot) != BLKmode)
902 continue;
904 if (p->base_offset + p->full_size == q->base_offset)
906 /* Q comes after P; combine Q into P. */
907 p->size += q->size;
908 p->full_size += q->full_size;
909 delete_q = 1;
911 else if (q->base_offset + q->full_size == p->base_offset)
913 /* P comes after Q; combine P into Q. */
914 q->size += p->size;
915 q->full_size += p->full_size;
916 delete_p = 1;
917 break;
919 if (delete_q)
920 cut_slot_from_list (q, &avail_temp_slots);
923 /* Either delete P or advance past it. */
924 if (delete_p)
925 cut_slot_from_list (p, &avail_temp_slots);
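/* Worked example (illustrative only): if free BLKmode slot P has
   base_offset 0 and full_size 16 while slot Q has base_offset 16 and
   full_size 8, then P->base_offset + P->full_size == Q->base_offset,
   so Q is merged into P, leaving one free slot covering 24 bytes.  */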
929 /* Find the temp slot corresponding to the object at address X. */
931 static struct temp_slot *
932 find_temp_slot_from_address (rtx x)
934 struct temp_slot *p;
935 rtx next;
936 int i;
938 for (i = max_slot_level (); i >= 0; i--)
939 for (p = *temp_slots_at_level (i); p; p = p->next)
941 if (XEXP (p->slot, 0) == x
942 || p->address == x
943 || (GET_CODE (x) == PLUS
944 && XEXP (x, 0) == virtual_stack_vars_rtx
945 && GET_CODE (XEXP (x, 1)) == CONST_INT
946 && INTVAL (XEXP (x, 1)) >= p->base_offset
947 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
948 return p;
950 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
951 for (next = p->address; next; next = XEXP (next, 1))
952 if (XEXP (next, 0) == x)
953 return p;
956 /* If we have a sum involving a register, see if it points to a temp
957 slot. */
958 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
959 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
960 return p;
961 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
962 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
963 return p;
965 return 0;
968 /* Indicate that NEW is an alternate way of referring to the temp slot
969 that previously was known by OLD. */
971 void
972 update_temp_slot_address (rtx old, rtx new)
974 struct temp_slot *p;
976 if (rtx_equal_p (old, new))
977 return;
979 p = find_temp_slot_from_address (old);
981 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
982 is a register, see if one operand of the PLUS is a temporary
983 location; if so, NEW points into it. Otherwise, if both OLD and
984 NEW are PLUS expressions with an operand in common, try a
985 recursive call on the remaining operands. */
986 if (p == 0)
988 if (GET_CODE (old) != PLUS)
989 return;
991 if (REG_P (new))
993 update_temp_slot_address (XEXP (old, 0), new);
994 update_temp_slot_address (XEXP (old, 1), new);
995 return;
997 else if (GET_CODE (new) != PLUS)
998 return;
1000 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1001 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1002 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1003 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1004 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1005 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1006 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1007 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1009 return;
1012 /* Otherwise add an alias for the temp's address. */
1013 else if (p->address == 0)
1014 p->address = new;
1015 else
1017 if (GET_CODE (p->address) != EXPR_LIST)
1018 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1020 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1024 /* If X could be a reference to a temporary slot, mark the fact that its
1025 address was taken. */
1027 void
1028 mark_temp_addr_taken (rtx x)
1030 struct temp_slot *p;
1032 if (x == 0)
1033 return;
1035 /* If X is not in memory or is at a constant address, it cannot be in
1036 a temporary slot. */
1037 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1038 return;
1040 p = find_temp_slot_from_address (XEXP (x, 0));
1041 if (p != 0)
1042 p->addr_taken = 1;
1045 /* If X could be a reference to a temporary slot, mark that slot as
1046 belonging to the previous (enclosing) nesting level. If X
1047 matched one of our slots, just mark that one. Otherwise, we can't
1048 easily predict which it is, so upgrade all of them. Kept slots
1049 need not be touched.
1051 This is called when an ({...}) construct occurs and a statement
1052 returns a value in memory. */
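/* For example (illustrative only), in a GNU statement expression such as

       x = ({ struct big b = f (); b; });

   the value of the last statement may live in a temporary slot allocated
   while expanding the inner statement; preserve_temp_slots keeps that slot
   alive past the end of the statement by moving it to the enclosing level.  */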
1054 void
1055 preserve_temp_slots (rtx x)
1057 struct temp_slot *p = 0, *next;
1059 /* If there is no result, we still might have some objects whose addresses
1060 were taken, so we need to make sure they stay around. */
1061 if (x == 0)
1063 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1065 next = p->next;
1067 if (p->addr_taken)
1068 move_slot_to_level (p, temp_slot_level - 1);
1071 return;
1074 /* If X is a register that is being used as a pointer, see if we have
1075 a temporary slot we know it points to. To be consistent with
1076 the code below, we really should preserve all non-kept slots
1077 if we can't find a match, but that seems to be much too costly. */
1078 if (REG_P (x) && REG_POINTER (x))
1079 p = find_temp_slot_from_address (x);
1081 /* If X is not in memory or is at a constant address, it cannot be in
1082 a temporary slot, but it can contain something whose address was
1083 taken. */
1084 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1086 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1088 next = p->next;
1090 if (p->addr_taken)
1091 move_slot_to_level (p, temp_slot_level - 1);
1094 return;
1097 /* First see if we can find a match. */
1098 if (p == 0)
1099 p = find_temp_slot_from_address (XEXP (x, 0));
1101 if (p != 0)
1103 /* Move everything at our level whose address was taken to our new
1104 level in case we used its address. */
1105 struct temp_slot *q;
1107 if (p->level == temp_slot_level)
1109 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1111 next = q->next;
1113 if (p != q && q->addr_taken)
1114 move_slot_to_level (q, temp_slot_level - 1);
1117 move_slot_to_level (p, temp_slot_level - 1);
1118 p->addr_taken = 0;
1120 return;
1123 /* Otherwise, preserve all non-kept slots at this level. */
1124 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1126 next = p->next;
1128 if (!p->keep)
1129 move_slot_to_level (p, temp_slot_level - 1);
1133 /* Free all temporaries used so far. This is normally called at the
1134 end of generating code for a statement. */
1136 void
1137 free_temp_slots (void)
1139 struct temp_slot *p, *next;
1141 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1143 next = p->next;
1145 if (!p->keep)
1146 make_slot_available (p);
1149 combine_temp_slots ();
1152 /* Push deeper into the nesting level for stack temporaries. */
1154 void
1155 push_temp_slots (void)
1157 temp_slot_level++;
1160 /* Pop a temporary nesting level. All slots in use in the current level
1161 are freed. */
1163 void
1164 pop_temp_slots (void)
1166 struct temp_slot *p, *next;
1168 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1170 next = p->next;
1171 make_slot_available (p);
1174 combine_temp_slots ();
1176 temp_slot_level--;
1179 /* Initialize temporary slots. */
1181 void
1182 init_temp_slots (void)
1184 /* We have not allocated any temporaries yet. */
1185 avail_temp_slots = 0;
1186 used_temp_slots = 0;
1187 temp_slot_level = 0;
1190 /* These routines are responsible for converting virtual register references
1191 to the actual hard register references once RTL generation is complete.
1193 The following four variables are used for communication between the
1194 routines. They contain the offsets of the virtual registers from their
1195 respective hard registers. */
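/* Illustrative only: after instantiation, a reference such as
   (plus (reg virtual-stack-vars) (const_int 8)) is rewritten as
   (plus (reg frame-pointer) (const_int var_offset + 8)); similarly,
   virtual-incoming-args maps to the arg pointer plus in_arg_offset.  */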
1197 static int in_arg_offset;
1198 static int var_offset;
1199 static int dynamic_offset;
1200 static int out_arg_offset;
1201 static int cfa_offset;
1203 /* In most machines, the stack pointer register is equivalent to the bottom
1204 of the stack. */
1206 #ifndef STACK_POINTER_OFFSET
1207 #define STACK_POINTER_OFFSET 0
1208 #endif
1210 /* If not defined, pick an appropriate default for the offset of dynamically
1211 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1212 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1214 #ifndef STACK_DYNAMIC_OFFSET
1216 /* The bottom of the stack points to the actual arguments. If
1217 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1218 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1219 stack space for register parameters is not pushed by the caller, but
1220 rather part of the fixed stack areas and hence not included in
1221 `current_function_outgoing_args_size'. Nevertheless, we must allow
1222 for it when allocating stack dynamic objects. */
1224 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1225 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1226 ((ACCUMULATE_OUTGOING_ARGS \
1227 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1228 + (STACK_POINTER_OFFSET))
1230 #else
1231 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1232 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1233 + (STACK_POINTER_OFFSET))
1234 #endif
1235 #endif
1238 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1239 is a virtual register, return the equivalent hard register and set the
1240 offset indirectly through the pointer. Otherwise, return 0. */
1242 static rtx
1243 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1245 rtx new;
1246 HOST_WIDE_INT offset;
1248 if (x == virtual_incoming_args_rtx)
1249 new = arg_pointer_rtx, offset = in_arg_offset;
1250 else if (x == virtual_stack_vars_rtx)
1251 new = frame_pointer_rtx, offset = var_offset;
1252 else if (x == virtual_stack_dynamic_rtx)
1253 new = stack_pointer_rtx, offset = dynamic_offset;
1254 else if (x == virtual_outgoing_args_rtx)
1255 new = stack_pointer_rtx, offset = out_arg_offset;
1256 else if (x == virtual_cfa_rtx)
1258 #ifdef FRAME_POINTER_CFA_OFFSET
1259 new = frame_pointer_rtx;
1260 #else
1261 new = arg_pointer_rtx;
1262 #endif
1263 offset = cfa_offset;
1265 else
1266 return NULL_RTX;
1268 *poffset = offset;
1269 return new;
1272 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1273 Instantiate any virtual registers present inside of *LOC. The expression
1274 is simplified, as much as possible, but is not to be considered "valid"
1275 in any sense implied by the target. If any change is made, set CHANGED
1276 to true. */
1278 static int
1279 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1281 HOST_WIDE_INT offset;
1282 bool *changed = (bool *) data;
1283 rtx x, new;
1285 x = *loc;
1286 if (x == 0)
1287 return 0;
1289 switch (GET_CODE (x))
1291 case REG:
1292 new = instantiate_new_reg (x, &offset);
1293 if (new)
1295 *loc = plus_constant (new, offset);
1296 if (changed)
1297 *changed = true;
1299 return -1;
1301 case PLUS:
1302 new = instantiate_new_reg (XEXP (x, 0), &offset);
1303 if (new)
1305 new = plus_constant (new, offset);
1306 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1307 if (changed)
1308 *changed = true;
1309 return -1;
1312 /* FIXME -- from old code */
1313 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1314 we can commute the PLUS and SUBREG because pointers into the
1315 frame are well-behaved. */
1316 break;
1318 default:
1319 break;
1322 return 0;
1325 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1326 matches the predicate for insn CODE operand OPERAND. */
1328 static int
1329 safe_insn_predicate (int code, int operand, rtx x)
1331 const struct insn_operand_data *op_data;
1333 if (code < 0)
1334 return true;
1336 op_data = &insn_data[code].operand[operand];
1337 if (op_data->predicate == NULL)
1338 return true;
1340 return op_data->predicate (x, op_data->mode);
1343 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1344 registers present inside of insn. The result will be a valid insn. */
1346 static void
1347 instantiate_virtual_regs_in_insn (rtx insn)
1349 HOST_WIDE_INT offset;
1350 int insn_code, i;
1351 bool any_change = false;
1352 rtx set, new, x, seq;
1354 /* There are some special cases to be handled first. */
1355 set = single_set (insn);
1356 if (set)
1358 /* We're allowed to assign to a virtual register. This is interpreted
1359 to mean that the underlying register gets assigned the inverse
1360 transformation. This is used, for example, in the handling of
1361 non-local gotos. */
1362 new = instantiate_new_reg (SET_DEST (set), &offset);
1363 if (new)
1365 start_sequence ();
1367 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1368 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1369 GEN_INT (-offset));
1370 x = force_operand (x, new);
1371 if (x != new)
1372 emit_move_insn (new, x);
1374 seq = get_insns ();
1375 end_sequence ();
1377 emit_insn_before (seq, insn);
1378 delete_insn (insn);
1379 return;
1382 /* Handle a straight copy from a virtual register by generating a
1383 new add insn. The difference between this and falling through
1384 to the generic case is avoiding a new pseudo and eliminating a
1385 move insn in the initial rtl stream. */
1386 new = instantiate_new_reg (SET_SRC (set), &offset);
1387 if (new && offset != 0
1388 && REG_P (SET_DEST (set))
1389 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1391 start_sequence ();
1393 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1394 new, GEN_INT (offset), SET_DEST (set),
1395 1, OPTAB_LIB_WIDEN);
1396 if (x != SET_DEST (set))
1397 emit_move_insn (SET_DEST (set), x);
1399 seq = get_insns ();
1400 end_sequence ();
1402 emit_insn_before (seq, insn);
1403 delete_insn (insn);
1404 return;
1407 extract_insn (insn);
1408 insn_code = INSN_CODE (insn);
1410 /* Handle a plus involving a virtual register by determining if the
1411 operands remain valid if they're modified in place. */
1412 if (GET_CODE (SET_SRC (set)) == PLUS
1413 && recog_data.n_operands >= 3
1414 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1415 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1416 && GET_CODE (recog_data.operand[2]) == CONST_INT
1417 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1419 offset += INTVAL (recog_data.operand[2]);
1421 /* If the sum is zero, then replace with a plain move. */
1422 if (offset == 0
1423 && REG_P (SET_DEST (set))
1424 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1426 start_sequence ();
1427 emit_move_insn (SET_DEST (set), new);
1428 seq = get_insns ();
1429 end_sequence ();
1431 emit_insn_before (seq, insn);
1432 delete_insn (insn);
1433 return;
1436 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1438 /* Using validate_change and apply_change_group here leaves
1439 recog_data in an invalid state. Since we know exactly what
1440 we want to check, do those two by hand. */
1441 if (safe_insn_predicate (insn_code, 1, new)
1442 && safe_insn_predicate (insn_code, 2, x))
1444 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1445 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1446 any_change = true;
1448 /* Fall through into the regular operand fixup loop in
1449 order to take care of operands other than 1 and 2. */
1453 else
1455 extract_insn (insn);
1456 insn_code = INSN_CODE (insn);
1459 /* In the general case, we expect virtual registers to appear only in
1460 operands, and then only as either bare registers or inside memories. */
1461 for (i = 0; i < recog_data.n_operands; ++i)
1463 x = recog_data.operand[i];
1464 switch (GET_CODE (x))
1466 case MEM:
1468 rtx addr = XEXP (x, 0);
1469 bool changed = false;
1471 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1472 if (!changed)
1473 continue;
1475 start_sequence ();
1476 x = replace_equiv_address (x, addr);
1477 seq = get_insns ();
1478 end_sequence ();
1479 if (seq)
1480 emit_insn_before (seq, insn);
1482 break;
1484 case REG:
1485 new = instantiate_new_reg (x, &offset);
1486 if (new == NULL)
1487 continue;
1488 if (offset == 0)
1489 x = new;
1490 else
1492 start_sequence ();
1494 /* Careful, special mode predicates may have stuff in
1495 insn_data[insn_code].operand[i].mode that isn't useful
1496 to us for computing a new value. */
1497 /* ??? Recognize address_operand and/or "p" constraints
1498 to see if (plus new offset) is a valid address before we put
1499 this through expand_simple_binop. */
1500 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1501 GEN_INT (offset), NULL_RTX,
1502 1, OPTAB_LIB_WIDEN);
1503 seq = get_insns ();
1504 end_sequence ();
1505 emit_insn_before (seq, insn);
1507 break;
1509 case SUBREG:
1510 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1511 if (new == NULL)
1512 continue;
1513 if (offset != 0)
1515 start_sequence ();
1516 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1517 GEN_INT (offset), NULL_RTX,
1518 1, OPTAB_LIB_WIDEN);
1519 seq = get_insns ();
1520 end_sequence ();
1521 emit_insn_before (seq, insn);
1523 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1524 GET_MODE (new), SUBREG_BYTE (x));
1525 break;
1527 default:
1528 continue;
1531 /* At this point, X contains the new value for the operand.
1532 Validate the new value vs the insn predicate. Note that
1533 asm insns will have insn_code -1 here. */
1534 if (!safe_insn_predicate (insn_code, i, x))
1535 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1537 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1538 any_change = true;
1541 if (any_change)
1543 /* Propagate operand changes into the duplicates. */
1544 for (i = 0; i < recog_data.n_dups; ++i)
1545 *recog_data.dup_loc[i]
1546 = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1548 /* Force re-recognition of the instruction for validation. */
1549 INSN_CODE (insn) = -1;
1552 if (asm_noperands (PATTERN (insn)) >= 0)
1554 if (!check_asm_operands (PATTERN (insn)))
1556 error_for_asm (insn, "impossible constraint in %<asm%>");
1557 delete_insn (insn);
1560 else
1562 if (recog_memoized (insn) < 0)
1563 fatal_insn_not_found (insn);
1567 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1568 do any instantiation required. */
1570 static void
1571 instantiate_decl (rtx x)
1573 rtx addr;
1575 if (x == 0)
1576 return;
1578 /* If this is a CONCAT, recurse for the pieces. */
1579 if (GET_CODE (x) == CONCAT)
1581 instantiate_decl (XEXP (x, 0));
1582 instantiate_decl (XEXP (x, 1));
1583 return;
1586 /* If this is not a MEM, no need to do anything. Similarly if the
1587 address is a constant or a register that is not a virtual register. */
1588 if (!MEM_P (x))
1589 return;
1591 addr = XEXP (x, 0);
1592 if (CONSTANT_P (addr)
1593 || (REG_P (addr)
1594 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1595 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1596 return;
1598 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1601 /* Helper for instantiate_decls called via walk_tree: Process all decls
1602 in the given DECL_VALUE_EXPR. */
1604 static tree
1605 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1607 tree t = *tp;
1608 if (! EXPR_P (t))
1610 *walk_subtrees = 0;
1611 if (DECL_P (t) && DECL_RTL_SET_P (t))
1612 instantiate_decl (DECL_RTL (t));
1614 return NULL;
1617 /* Subroutine of instantiate_decls: Process all decls in the given
1618 BLOCK node and all its subblocks. */
1620 static void
1621 instantiate_decls_1 (tree let)
1623 tree t;
1625 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1627 if (DECL_RTL_SET_P (t))
1628 instantiate_decl (DECL_RTL (t));
1629 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1631 tree v = DECL_VALUE_EXPR (t);
1632 walk_tree (&v, instantiate_expr, NULL, NULL);
1636 /* Process all subblocks. */
1637 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1638 instantiate_decls_1 (t);
1641 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1642 all virtual registers in their DECL_RTL's. */
1644 static void
1645 instantiate_decls (tree fndecl)
1647 tree decl;
1649 /* Process all parameters of the function. */
1650 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1652 instantiate_decl (DECL_RTL (decl));
1653 instantiate_decl (DECL_INCOMING_RTL (decl));
1654 if (DECL_HAS_VALUE_EXPR_P (decl))
1656 tree v = DECL_VALUE_EXPR (decl);
1657 walk_tree (&v, instantiate_expr, NULL, NULL);
1661 /* Now process all variables defined in the function or its subblocks. */
1662 instantiate_decls_1 (DECL_INITIAL (fndecl));
1665 /* Pass through the INSNS of function FNDECL and convert virtual register
1666 references to hard register references. */
1668 static unsigned int
1669 instantiate_virtual_regs (void)
1671 rtx insn;
1673 /* Compute the offsets to use for this function. */
1674 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1675 var_offset = STARTING_FRAME_OFFSET;
1676 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1677 out_arg_offset = STACK_POINTER_OFFSET;
1678 #ifdef FRAME_POINTER_CFA_OFFSET
1679 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1680 #else
1681 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1682 #endif
1684 /* Initialize recognition, indicating that volatile is OK. */
1685 init_recog ();
1687 /* Scan through all the insns, instantiating every virtual register still
1688 present. */
1689 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1690 if (INSN_P (insn))
1692 /* These patterns in the instruction stream can never be recognized.
1693 Fortunately, they shouldn't contain virtual registers either. */
1694 if (GET_CODE (PATTERN (insn)) == USE
1695 || GET_CODE (PATTERN (insn)) == CLOBBER
1696 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1697 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1698 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1699 continue;
1701 instantiate_virtual_regs_in_insn (insn);
1703 if (INSN_DELETED_P (insn))
1704 continue;
1706 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1708 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1709 if (GET_CODE (insn) == CALL_INSN)
1710 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1711 instantiate_virtual_regs_in_rtx, NULL);
1714 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1715 instantiate_decls (current_function_decl);
1717 /* Indicate that, from now on, assign_stack_local should use
1718 frame_pointer_rtx. */
1719 virtuals_instantiated = 1;
1720 return 0;
1723 struct tree_opt_pass pass_instantiate_virtual_regs =
1725 "vregs", /* name */
1726 NULL, /* gate */
1727 instantiate_virtual_regs, /* execute */
1728 NULL, /* sub */
1729 NULL, /* next */
1730 0, /* static_pass_number */
1731 0, /* tv_id */
1732 0, /* properties_required */
1733 0, /* properties_provided */
1734 0, /* properties_destroyed */
1735 0, /* todo_flags_start */
1736 TODO_dump_func, /* todo_flags_finish */
1737 0 /* letter */
1741 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1742 This means a type for which function calls must pass an address to the
1743 function or get an address back from the function.
1744 EXP may be a type node or an expression (whose type is tested). */
1747 aggregate_value_p (tree exp, tree fntype)
1749 int i, regno, nregs;
1750 rtx reg;
1752 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1754 if (fntype)
1755 switch (TREE_CODE (fntype))
1757 case CALL_EXPR:
1758 fntype = get_callee_fndecl (fntype);
1759 fntype = fntype ? TREE_TYPE (fntype) : 0;
1760 break;
1761 case FUNCTION_DECL:
1762 fntype = TREE_TYPE (fntype);
1763 break;
1764 case FUNCTION_TYPE:
1765 case METHOD_TYPE:
1766 break;
1767 case IDENTIFIER_NODE:
1768 fntype = 0;
1769 break;
1770 default:
1771 /* We don't expect other tree codes here. */
1772 gcc_unreachable ();
1775 if (TREE_CODE (type) == VOID_TYPE)
1776 return 0;
1777 /* If the front end has decided that this needs to be passed by
1778 reference, do so. */
1779 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1780 && DECL_BY_REFERENCE (exp))
1781 return 1;
1782 if (targetm.calls.return_in_memory (type, fntype))
1783 return 1;
1784 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1785 and thus can't be returned in registers. */
1786 if (TREE_ADDRESSABLE (type))
1787 return 1;
1788 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1789 return 1;
1790 /* Make sure we have suitable call-clobbered regs to return
1791 the value in; if not, we must return it in memory. */
1792 reg = hard_function_value (type, 0, fntype, 0);
1794 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1795 it is OK. */
1796 if (!REG_P (reg))
1797 return 0;
1799 regno = REGNO (reg);
1800 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1801 for (i = 0; i < nregs; i++)
1802 if (! call_used_regs[regno + i])
1803 return 1;
1804 return 0;
1807 /* Return true if we should assign DECL a pseudo register; false if it
1808 should live on the local stack. */
1810 bool
1811 use_register_for_decl (tree decl)
1813 /* Honor volatile. */
1814 if (TREE_SIDE_EFFECTS (decl))
1815 return false;
1817 /* Honor addressability. */
1818 if (TREE_ADDRESSABLE (decl))
1819 return false;
1821 /* Only register-like things go in registers. */
1822 if (DECL_MODE (decl) == BLKmode)
1823 return false;
1825 /* If -ffloat-store specified, don't put explicit float variables
1826 into registers. */
1827 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1828 propagates values across these stores, and it probably shouldn't. */
1829 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1830 return false;
1832 /* If we're not interested in tracking debugging information for
1833 this decl, then we can certainly put it in a register. */
1834 if (DECL_IGNORED_P (decl))
1835 return true;
1837 return (optimize || DECL_REGISTER (decl));
1840 /* Return true if TYPE should be passed by invisible reference. */
1842 bool
1843 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1844 tree type, bool named_arg)
1846 if (type)
1848 /* If this type contains non-trivial constructors, then it is
1849 forbidden for the middle-end to create any new copies. */
1850 if (TREE_ADDRESSABLE (type))
1851 return true;
1853 /* GCC post 3.4 passes *all* variable sized types by reference. */
1854 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1855 return true;
1858 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1861 /* Return true if TYPE, which is passed by reference, should be callee
1862 copied instead of caller copied. */
1864 bool
1865 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1866 tree type, bool named_arg)
1868 if (type && TREE_ADDRESSABLE (type))
1869 return false;
1870 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1873 /* Structures to communicate between the subroutines of assign_parms.
1874 The first holds data persistent across all parameters, the second
1875 is cleared out for each parameter. */
1877 struct assign_parm_data_all
1879 CUMULATIVE_ARGS args_so_far;
1880 struct args_size stack_args_size;
1881 tree function_result_decl;
1882 tree orig_fnargs;
1883 rtx conversion_insns;
1884 HOST_WIDE_INT pretend_args_size;
1885 HOST_WIDE_INT extra_pretend_bytes;
1886 int reg_parm_stack_space;
1889 struct assign_parm_data_one
1891 tree nominal_type;
1892 tree passed_type;
1893 rtx entry_parm;
1894 rtx stack_parm;
1895 enum machine_mode nominal_mode;
1896 enum machine_mode passed_mode;
1897 enum machine_mode promoted_mode;
1898 struct locate_and_pad_arg_data locate;
1899 int partial;
1900 BOOL_BITFIELD named_arg : 1;
1901 BOOL_BITFIELD passed_pointer : 1;
1902 BOOL_BITFIELD on_stack : 1;
1903 BOOL_BITFIELD loaded_in_reg : 1;
1906 /* A subroutine of assign_parms. Initialize ALL. */
1908 static void
1909 assign_parms_initialize_all (struct assign_parm_data_all *all)
1911 tree fntype;
1913 memset (all, 0, sizeof (*all));
1915 fntype = TREE_TYPE (current_function_decl);
1917 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1918 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1919 #else
1920 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1921 current_function_decl, -1);
1922 #endif
1924 #ifdef REG_PARM_STACK_SPACE
1925 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1926 #endif
1929 /* If ARGS contains entries with complex types, split the entry into two
1930 entries of the component type. Return a new list if substitutions are
1931 needed, else the old list. */
1933 static tree
1934 split_complex_args (tree args)
1936 tree p;
1938 /* Before allocating memory, check for the common case of no complex. */
1939 for (p = args; p; p = TREE_CHAIN (p))
1941 tree type = TREE_TYPE (p);
1942 if (TREE_CODE (type) == COMPLEX_TYPE
1943 && targetm.calls.split_complex_arg (type))
1944 goto found;
1946 return args;
1948 found:
1949 args = copy_list (args);
1951 for (p = args; p; p = TREE_CHAIN (p))
1953 tree type = TREE_TYPE (p);
1954 if (TREE_CODE (type) == COMPLEX_TYPE
1955 && targetm.calls.split_complex_arg (type))
1957 tree decl;
1958 tree subtype = TREE_TYPE (type);
1959 bool addressable = TREE_ADDRESSABLE (p);
1961 /* Rewrite the PARM_DECL's type with its component. */
1962 TREE_TYPE (p) = subtype;
1963 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1964 DECL_MODE (p) = VOIDmode;
1965 DECL_SIZE (p) = NULL;
1966 DECL_SIZE_UNIT (p) = NULL;
1967 /* If this arg must go in memory, put it in a pseudo here.
1968 We can't allow it to go in memory as per normal parms,
1969 because the usual place might not have the imag part
1970 adjacent to the real part. */
1971 DECL_ARTIFICIAL (p) = addressable;
1972 DECL_IGNORED_P (p) = addressable;
1973 TREE_ADDRESSABLE (p) = 0;
1974 layout_decl (p, 0);
1976 /* Build a second synthetic decl. */
1977 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1978 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1979 DECL_ARTIFICIAL (decl) = addressable;
1980 DECL_IGNORED_P (decl) = addressable;
1981 layout_decl (decl, 0);
1983 /* Splice it in; skip the new decl. */
1984 TREE_CHAIN (decl) = TREE_CHAIN (p);
1985 TREE_CHAIN (p) = decl;
1986 p = decl;
1990 return args;
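/* Illustrative sketch, not part of GCC: when the target's
   split_complex_arg hook accepts a COMPLEX_TYPE, a declaration such as

     double f (_Complex double z, int i);

   has its PARM_DECL chain rewritten here from

     z (complex double)  ->  i (int)

   to

     z (double, real part)  ->  <synthetic PARM_DECL> (double, imag part)
       ->  i (int)

   assign_parms_unsplit_complex later recombines the two halves into a
   CONCAT (or, for an addressable parm, copies them into one stack slot)
   so the rest of the compiler still sees a single complex parameter.  */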
1993 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
1994 the hidden struct return argument, and (abi willing) complex args.
1995 Return the new parameter list. */
1997 static tree
1998 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2000 tree fndecl = current_function_decl;
2001 tree fntype = TREE_TYPE (fndecl);
2002 tree fnargs = DECL_ARGUMENTS (fndecl);
2004 /* If struct value address is treated as the first argument, make it so. */
2005 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2006 && ! current_function_returns_pcc_struct
2007 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2009 tree type = build_pointer_type (TREE_TYPE (fntype));
2010 tree decl;
2012 decl = build_decl (PARM_DECL, NULL_TREE, type);
2013 DECL_ARG_TYPE (decl) = type;
2014 DECL_ARTIFICIAL (decl) = 1;
2015 DECL_IGNORED_P (decl) = 1;
2017 TREE_CHAIN (decl) = fnargs;
2018 fnargs = decl;
2019 all->function_result_decl = decl;
2022 all->orig_fnargs = fnargs;
2024 /* If the target wants to split complex arguments into scalars, do so. */
2025 if (targetm.calls.split_complex_arg)
2026 fnargs = split_complex_args (fnargs);
2028 return fnargs;
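/* Illustrative sketch, not part of GCC: for

     struct big f (int i);

   on a target that returns 'struct big' in memory and has no dedicated
   struct-value register (struct_value_rtx returns 0), the augmented list
   built above is

     <artificial 'struct big *' parm>  ->  i

   and all->function_result_decl remembers the artificial parm so that
   assign_parms can later point DECL_RTL (DECL_RESULT (f)) at a MEM based
   on that incoming address.  */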
2031 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2032 data for the parameter. Incorporate ABI specifics such as pass-by-
2033 reference and type promotion. */
2035 static void
2036 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2037 struct assign_parm_data_one *data)
2039 tree nominal_type, passed_type;
2040 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2042 memset (data, 0, sizeof (*data));
2044 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2045 if (!current_function_stdarg)
2046 data->named_arg = 1; /* No variadic parms. */
2047 else if (TREE_CHAIN (parm))
2048 data->named_arg = 1; /* Not the last non-variadic parm. */
2049 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2050 data->named_arg = 1; /* Only variadic ones are unnamed. */
2051 else
2052 data->named_arg = 0; /* Treat as variadic. */
2054 nominal_type = TREE_TYPE (parm);
2055 passed_type = DECL_ARG_TYPE (parm);
2057 /* Look out for errors propagating this far. Also, if the parameter's
2058 type is void then its value doesn't matter. */
2059 if (TREE_TYPE (parm) == error_mark_node
2060 /* This can happen after weird syntax errors
2061 or if an enum type is defined among the parms. */
2062 || TREE_CODE (parm) != PARM_DECL
2063 || passed_type == NULL
2064 || VOID_TYPE_P (nominal_type))
2066 nominal_type = passed_type = void_type_node;
2067 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2068 goto egress;
2071 /* Find mode of arg as it is passed, and mode of arg as it should be
2072 during execution of this function. */
2073 passed_mode = TYPE_MODE (passed_type);
2074 nominal_mode = TYPE_MODE (nominal_type);
2076 /* If the parm is to be passed as a transparent union, use the type of
2077 the first field for the tests below. We have already verified that
2078 the modes are the same. */
2079 if (TREE_CODE (passed_type) == UNION_TYPE
2080 && TYPE_TRANSPARENT_UNION (passed_type))
2081 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2083 /* See if this arg was passed by invisible reference. */
2084 if (pass_by_reference (&all->args_so_far, passed_mode,
2085 passed_type, data->named_arg))
2087 passed_type = nominal_type = build_pointer_type (passed_type);
2088 data->passed_pointer = true;
2089 passed_mode = nominal_mode = Pmode;
2092 /* Find mode as it is passed by the ABI. */
2093 promoted_mode = passed_mode;
2094 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2096 int unsignedp = TYPE_UNSIGNED (passed_type);
2097 promoted_mode = promote_mode (passed_type, promoted_mode,
2098 &unsignedp, 1);
2101 egress:
2102 data->nominal_type = nominal_type;
2103 data->passed_type = passed_type;
2104 data->nominal_mode = nominal_mode;
2105 data->passed_mode = passed_mode;
2106 data->promoted_mode = promoted_mode;
2109 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2111 static void
2112 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2113 struct assign_parm_data_one *data, bool no_rtl)
2115 int varargs_pretend_bytes = 0;
2117 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2118 data->promoted_mode,
2119 data->passed_type,
2120 &varargs_pretend_bytes, no_rtl);
2122 /* If the back-end has requested extra stack space, record how much is
2123 needed. Do not change pretend_args_size otherwise since it may be
2124 nonzero from an earlier partial argument. */
2125 if (varargs_pretend_bytes > 0)
2126 all->pretend_args_size = varargs_pretend_bytes;
2129 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2130 the incoming location of the current parameter. */
2132 static void
2133 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2134 struct assign_parm_data_one *data)
2136 HOST_WIDE_INT pretend_bytes = 0;
2137 rtx entry_parm;
2138 bool in_regs;
2140 if (data->promoted_mode == VOIDmode)
2142 data->entry_parm = data->stack_parm = const0_rtx;
2143 return;
2146 #ifdef FUNCTION_INCOMING_ARG
2147 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2148 data->passed_type, data->named_arg);
2149 #else
2150 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2151 data->passed_type, data->named_arg);
2152 #endif
2154 if (entry_parm == 0)
2155 data->promoted_mode = data->passed_mode;
2157 /* Determine parm's home in the stack, in case it arrives in the stack
2158 or we should pretend it did. Compute the stack position and rtx where
2159 the argument arrives and its size.
2161 There is one complexity here: If this was a parameter that would
2162 have been passed in registers, but wasn't only because it is
2163 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2164 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2165 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2166 as it was the previous time. */
2167 in_regs = entry_parm != 0;
2168 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2169 in_regs = true;
2170 #endif
2171 if (!in_regs && !data->named_arg)
2173 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2175 rtx tem;
2176 #ifdef FUNCTION_INCOMING_ARG
2177 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2178 data->passed_type, true);
2179 #else
2180 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2181 data->passed_type, true);
2182 #endif
2183 in_regs = tem != NULL;
2187 /* If this parameter was passed both in registers and in the stack, use
2188 the copy on the stack. */
2189 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2190 data->passed_type))
2191 entry_parm = 0;
2193 if (entry_parm)
2195 int partial;
2197 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2198 data->promoted_mode,
2199 data->passed_type,
2200 data->named_arg);
2201 data->partial = partial;
2203 /* The caller might already have allocated stack space for the
2204 register parameters. */
2205 if (partial != 0 && all->reg_parm_stack_space == 0)
2207 /* Part of this argument is passed in registers and part
2208 is passed on the stack. Ask the prologue code to extend
2209 the stack part so that we can recreate the full value.
2211 PRETEND_BYTES is the size of the registers we need to store.
2212 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2213 stack space that the prologue should allocate.
2215 Internally, gcc assumes that the argument pointer is aligned
2216 to STACK_BOUNDARY bits. This is used both for alignment
2217 optimizations (see init_emit) and to locate arguments that are
2218 aligned to more than PARM_BOUNDARY bits. We must preserve this
2219 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2220 a stack boundary. */
2222 /* We assume at most one partial arg, and it must be the first
2223 argument on the stack. */
2224 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2226 pretend_bytes = partial;
2227 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2229 /* We want to align relative to the actual stack pointer, so
2230 don't include this in the stack size until later. */
2231 all->extra_pretend_bytes = all->pretend_args_size;
2235 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2236 entry_parm ? data->partial : 0, current_function_decl,
2237 &all->stack_args_size, &data->locate);
2239 /* Adjust offsets to include the pretend args. */
2240 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2241 data->locate.slot_offset.constant += pretend_bytes;
2242 data->locate.offset.constant += pretend_bytes;
2244 data->entry_parm = entry_parm;
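/* Worked example, illustrative only (all numbers assumed): suppose
   arg_partial_bytes says the first 8 bytes of a 16-byte argument arrive
   in registers, STACK_BYTES is 16 and there is no REG_PARM_STACK_SPACE.
   Then in the block above

     pretend_bytes            = 8
     all->pretend_args_size   = CEIL_ROUND (8, 16) = 16
     all->extra_pretend_bytes = 16

   so the prologue reserves a stack-aligned block into which the register
   half can be spilled next to the stack half, and slot_offset and offset
   are afterwards shifted by extra_pretend_bytes - pretend_bytes = 8.  */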
2247 /* A subroutine of assign_parms. If there is actually space on the stack
2248 for this parm, count it in stack_args_size and return true. */
2250 static bool
2251 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2252 struct assign_parm_data_one *data)
2254 /* Trivially true if we've no incoming register. */
2255 if (data->entry_parm == NULL)
2257 /* Also true if we're partially in registers and partially not,
2258 since we've arranged to drop the entire argument on the stack. */
2259 else if (data->partial != 0)
2261 /* Also true if the target says that it's passed in both registers
2262 and on the stack. */
2263 else if (GET_CODE (data->entry_parm) == PARALLEL
2264 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2266 /* Also true if the target says that there's stack allocated for
2267 all register parameters. */
2268 else if (all->reg_parm_stack_space > 0)
2270 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2271 else
2272 return false;
2274 all->stack_args_size.constant += data->locate.size.constant;
2275 if (data->locate.size.var)
2276 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2278 return true;
2281 /* A subroutine of assign_parms. Given that this parameter is allocated
2282 stack space by the ABI, find it. */
2284 static void
2285 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2287 rtx offset_rtx, stack_parm;
2288 unsigned int align, boundary;
2290 /* If we're passing this arg using a reg, make its stack home the
2291 aligned stack slot. */
2292 if (data->entry_parm)
2293 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2294 else
2295 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2297 stack_parm = current_function_internal_arg_pointer;
2298 if (offset_rtx != const0_rtx)
2299 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2300 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2302 set_mem_attributes (stack_parm, parm, 1);
2304 boundary = data->locate.boundary;
2305 align = BITS_PER_UNIT;
2307 /* If we're padding upward, we know that the alignment of the slot
2308 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2309 intentionally forcing upward padding. Otherwise we have to come
2310 up with a guess at the alignment based on OFFSET_RTX. */
2311 if (data->locate.where_pad != downward || data->entry_parm)
2312 align = boundary;
2313 else if (GET_CODE (offset_rtx) == CONST_INT)
2315 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2316 align = align & -align;
2318 set_mem_align (stack_parm, align);
2320 if (data->entry_parm)
2321 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2323 data->stack_parm = stack_parm;
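/* Worked example, illustrative only: when the slot is padded downward and
   there is no incoming register, the alignment is guessed from OFFSET_RTX.
   On a byte-addressed target with a constant offset of 4 and a declared
   boundary of 64 bits:

     align = 4 * BITS_PER_UNIT | 64   =  32 | 64  =  96
     align = 96 & -96                 =  32

   i.e. the MEM is recorded as only 32-bit aligned, the largest power of
   two known to divide both the offset (in bits) and the boundary.  */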
2326 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2327 always valid and contiguous. */
2329 static void
2330 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2332 rtx entry_parm = data->entry_parm;
2333 rtx stack_parm = data->stack_parm;
2335 /* If this parm was passed part in regs and part in memory, pretend it
2336 arrived entirely in memory by pushing the register-part onto the stack.
2337 In the special case of a DImode or DFmode that is split, we could put
2338 it together in a pseudoreg directly, but for now that's not worth
2339 bothering with. */
2340 if (data->partial != 0)
2342 /* Handle calls that pass values in multiple non-contiguous
2343 locations. The Irix 6 ABI has examples of this. */
2344 if (GET_CODE (entry_parm) == PARALLEL)
2345 emit_group_store (validize_mem (stack_parm), entry_parm,
2346 data->passed_type,
2347 int_size_in_bytes (data->passed_type));
2348 else
2350 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2351 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2352 data->partial / UNITS_PER_WORD);
2355 entry_parm = stack_parm;
2358 /* If we didn't decide this parm came in a register, by default it came
2359 on the stack. */
2360 else if (entry_parm == NULL)
2361 entry_parm = stack_parm;
2363 /* When an argument is passed in multiple locations, we can't make use
2364 of this information, but we can save some copying if the whole argument
2365 is passed in a single register. */
2366 else if (GET_CODE (entry_parm) == PARALLEL
2367 && data->nominal_mode != BLKmode
2368 && data->passed_mode != BLKmode)
2370 size_t i, len = XVECLEN (entry_parm, 0);
2372 for (i = 0; i < len; i++)
2373 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2374 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2375 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2376 == data->passed_mode)
2377 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2379 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2380 break;
2384 data->entry_parm = entry_parm;
2387 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2388 always valid and properly aligned. */
2390 static void
2391 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2393 rtx stack_parm = data->stack_parm;
2395 /* If we can't trust the parm stack slot to be aligned enough for its
2396 ultimate type, don't use that slot after entry. We'll make another
2397 stack slot, if we need one. */
2398 if (stack_parm
2399 && ((STRICT_ALIGNMENT
2400 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2401 || (data->nominal_type
2402 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2403 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2404 stack_parm = NULL;
2406 /* If parm was passed in memory, and we need to convert it on entry,
2407 don't store it back in that same slot. */
2408 else if (data->entry_parm == stack_parm
2409 && data->nominal_mode != BLKmode
2410 && data->nominal_mode != data->passed_mode)
2411 stack_parm = NULL;
2413 /* If stack protection is in effect for this function, don't leave any
2414 pointers in their passed stack slots. */
2415 else if (cfun->stack_protect_guard
2416 && (flag_stack_protect == 2
2417 || data->passed_pointer
2418 || POINTER_TYPE_P (data->nominal_type)))
2419 stack_parm = NULL;
2421 data->stack_parm = stack_parm;
2424 /* A subroutine of assign_parms. Return true if the current parameter
2425 should be stored as a BLKmode in the current frame. */
2427 static bool
2428 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2430 if (data->nominal_mode == BLKmode)
2431 return true;
2432 if (GET_CODE (data->entry_parm) == PARALLEL)
2433 return true;
2435 #ifdef BLOCK_REG_PADDING
2436 /* Only assign_parm_setup_block knows how to deal with register arguments
2437 that are padded at the least significant end. */
2438 if (REG_P (data->entry_parm)
2439 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2440 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2441 == (BYTES_BIG_ENDIAN ? upward : downward)))
2442 return true;
2443 #endif
2445 return false;
2448 /* A subroutine of assign_parms. Arrange for the parameter to be
2449 present and valid in DATA->STACK_RTL. */
2451 static void
2452 assign_parm_setup_block (struct assign_parm_data_all *all,
2453 tree parm, struct assign_parm_data_one *data)
2455 rtx entry_parm = data->entry_parm;
2456 rtx stack_parm = data->stack_parm;
2457 HOST_WIDE_INT size;
2458 HOST_WIDE_INT size_stored;
2459 rtx orig_entry_parm = entry_parm;
2461 if (GET_CODE (entry_parm) == PARALLEL)
2462 entry_parm = emit_group_move_into_temps (entry_parm);
2464 /* If we've a non-block object that's nevertheless passed in parts,
2465 reconstitute it in register operations rather than on the stack. */
2466 if (GET_CODE (entry_parm) == PARALLEL
2467 && data->nominal_mode != BLKmode)
2469 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2471 if ((XVECLEN (entry_parm, 0) > 1
2472 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2473 && use_register_for_decl (parm))
2475 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2477 push_to_sequence (all->conversion_insns);
2479 /* For values returned in multiple registers, handle possible
2480 incompatible calls to emit_group_store.
2482 For example, the following would be invalid, and would have to
2483 be fixed by the conditional below:
2485 emit_group_store ((reg:SF), (parallel:DF))
2486 emit_group_store ((reg:SI), (parallel:DI))
2488 An example of this are doubles in e500 v2:
2489 (parallel:DF (expr_list (reg:SI) (const_int 0))
2490 (expr_list (reg:SI) (const_int 4))). */
2491 if (data->nominal_mode != data->passed_mode)
2493 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2494 emit_group_store (t, entry_parm, NULL_TREE,
2495 GET_MODE_SIZE (GET_MODE (entry_parm)));
2496 convert_move (parmreg, t, 0);
2498 else
2499 emit_group_store (parmreg, entry_parm, data->nominal_type,
2500 int_size_in_bytes (data->nominal_type));
2502 all->conversion_insns = get_insns ();
2503 end_sequence ();
2505 SET_DECL_RTL (parm, parmreg);
2506 return;
2510 size = int_size_in_bytes (data->passed_type);
2511 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2512 if (stack_parm == 0)
2514 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2515 stack_parm = assign_stack_local (BLKmode, size_stored,
2516 DECL_ALIGN (parm));
2517 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2518 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2519 set_mem_attributes (stack_parm, parm, 1);
2522 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2523 calls that pass values in multiple non-contiguous locations. */
2524 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2526 rtx mem;
2528 /* Note that we will be storing an integral number of words.
2529 So we have to be careful to ensure that we allocate an
2530 integral number of words. We do this above when we call
2531 assign_stack_local if space was not allocated in the argument
2532 list. If it was, this will not work if PARM_BOUNDARY is not
2533 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2534 if it becomes a problem. The exception is when BLKmode arrives
2535 with arguments not conforming to word_mode. */
2537 if (data->stack_parm == 0)
2539 else if (GET_CODE (entry_parm) == PARALLEL)
2541 else
2542 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2544 mem = validize_mem (stack_parm);
2546 /* Handle values in multiple non-contiguous locations. */
2547 if (GET_CODE (entry_parm) == PARALLEL)
2549 push_to_sequence (all->conversion_insns);
2550 emit_group_store (mem, entry_parm, data->passed_type, size);
2551 all->conversion_insns = get_insns ();
2552 end_sequence ();
2555 else if (size == 0)
2558 /* If SIZE is that of a mode no bigger than a word, just use
2559 that mode's store operation. */
2560 else if (size <= UNITS_PER_WORD)
2562 enum machine_mode mode
2563 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2565 if (mode != BLKmode
2566 #ifdef BLOCK_REG_PADDING
2567 && (size == UNITS_PER_WORD
2568 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2569 != (BYTES_BIG_ENDIAN ? upward : downward)))
2570 #endif
2573 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2574 emit_move_insn (change_address (mem, mode, 0), reg);
2577 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2578 machine must be aligned to the left before storing
2579 to memory. Note that the previous test doesn't
2580 handle all cases (e.g. SIZE == 3). */
2581 else if (size != UNITS_PER_WORD
2582 #ifdef BLOCK_REG_PADDING
2583 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2584 == downward)
2585 #else
2586 && BYTES_BIG_ENDIAN
2587 #endif
2590 rtx tem, x;
2591 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2592 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2594 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2595 build_int_cst (NULL_TREE, by),
2596 NULL_RTX, 1);
2597 tem = change_address (mem, word_mode, 0);
2598 emit_move_insn (tem, x);
2600 else
2601 move_block_from_reg (REGNO (entry_parm), mem,
2602 size_stored / UNITS_PER_WORD);
2604 else
2605 move_block_from_reg (REGNO (entry_parm), mem,
2606 size_stored / UNITS_PER_WORD);
2608 else if (data->stack_parm == 0)
2610 push_to_sequence (all->conversion_insns);
2611 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2612 BLOCK_OP_NORMAL);
2613 all->conversion_insns = get_insns ();
2614 end_sequence ();
2617 data->stack_parm = stack_parm;
2618 SET_DECL_RTL (parm, stack_parm);
2621 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2622 parameter. Get it there. Perform all ABI specified conversions. */
2624 static void
2625 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2626 struct assign_parm_data_one *data)
2628 rtx parmreg;
2629 enum machine_mode promoted_nominal_mode;
2630 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2631 bool did_conversion = false;
2633 /* Store the parm in a pseudoregister during the function, but we may
2634 need to do it in a wider mode. */
2636 /* This is not really promoting for a call. However we need to be
2637 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2638 promoted_nominal_mode
2639 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2641 parmreg = gen_reg_rtx (promoted_nominal_mode);
2643 if (!DECL_ARTIFICIAL (parm))
2644 mark_user_reg (parmreg);
2646 /* If this was an item that we received a pointer to,
2647 set DECL_RTL appropriately. */
2648 if (data->passed_pointer)
2650 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2651 set_mem_attributes (x, parm, 1);
2652 SET_DECL_RTL (parm, x);
2654 else
2655 SET_DECL_RTL (parm, parmreg);
2657 /* Copy the value into the register. */
2658 if (data->nominal_mode != data->passed_mode
2659 || promoted_nominal_mode != data->promoted_mode)
2661 int save_tree_used;
2663 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2664 mode, by the caller. We now have to convert it to
2665 NOMINAL_MODE, if different. However, PARMREG may be in
2666 a different mode than NOMINAL_MODE if it is being stored
2667 promoted.
2669 If ENTRY_PARM is a hard register, it might be in a register
2670 not valid for operating in its mode (e.g., an odd-numbered
2671 register for a DFmode). In that case, moves are the only
2672 thing valid, so we can't do a convert from there. This
2673 occurs when the calling sequence allows such misaligned
2674 usages.
2676 In addition, the conversion may involve a call, which could
2677 clobber parameters which haven't been copied to pseudo
2678 registers yet. Therefore, we must first copy the parm to
2679 a pseudo reg here, and save the conversion until after all
2680 parameters have been moved. */
2682 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2684 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2686 push_to_sequence (all->conversion_insns);
2687 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2689 if (GET_CODE (tempreg) == SUBREG
2690 && GET_MODE (tempreg) == data->nominal_mode
2691 && REG_P (SUBREG_REG (tempreg))
2692 && data->nominal_mode == data->passed_mode
2693 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2694 && GET_MODE_SIZE (GET_MODE (tempreg))
2695 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2697 /* The argument is already sign/zero extended, so note it
2698 into the subreg. */
2699 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2700 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2703 /* TREE_USED gets set erroneously during expand_assignment. */
2704 save_tree_used = TREE_USED (parm);
2705 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2706 TREE_USED (parm) = save_tree_used;
2707 all->conversion_insns = get_insns ();
2708 end_sequence ();
2710 did_conversion = true;
2712 else
2713 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2715 /* If we were passed a pointer but the actual value can safely live
2716 in a register, put it in one. */
2717 if (data->passed_pointer
2718 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2719 /* If by-reference argument was promoted, demote it. */
2720 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2721 || use_register_for_decl (parm)))
2723 /* We can't use nominal_mode, because it will have been set to
2724 Pmode above. We must use the actual mode of the parm. */
2725 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2726 mark_user_reg (parmreg);
2728 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2730 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2731 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2733 push_to_sequence (all->conversion_insns);
2734 emit_move_insn (tempreg, DECL_RTL (parm));
2735 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2736 emit_move_insn (parmreg, tempreg);
2737 all->conversion_insns = get_insns ();
2738 end_sequence ();
2740 did_conversion = true;
2742 else
2743 emit_move_insn (parmreg, DECL_RTL (parm));
2745 SET_DECL_RTL (parm, parmreg);
2747 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2748 now the parm. */
2749 data->stack_parm = NULL;
2752 /* Mark the register as eliminable if we did no conversion and it was
2753 copied from memory at a fixed offset, and the arg pointer was not
2754 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2755 offset formed an invalid address, such memory-equivalences as we
2756 make here would screw up life analysis for it. */
2757 if (data->nominal_mode == data->passed_mode
2758 && !did_conversion
2759 && data->stack_parm != 0
2760 && MEM_P (data->stack_parm)
2761 && data->locate.offset.var == 0
2762 && reg_mentioned_p (virtual_incoming_args_rtx,
2763 XEXP (data->stack_parm, 0)))
2765 rtx linsn = get_last_insn ();
2766 rtx sinsn, set;
2768 /* Mark complex types separately. */
2769 if (GET_CODE (parmreg) == CONCAT)
2771 enum machine_mode submode
2772 = GET_MODE_INNER (GET_MODE (parmreg));
2773 int regnor = REGNO (XEXP (parmreg, 0));
2774 int regnoi = REGNO (XEXP (parmreg, 1));
2775 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2776 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2777 GET_MODE_SIZE (submode));
2779 /* Scan backwards for the set of the real and
2780 imaginary parts. */
2781 for (sinsn = linsn; sinsn != 0;
2782 sinsn = prev_nonnote_insn (sinsn))
2784 set = single_set (sinsn);
2785 if (set == 0)
2786 continue;
2788 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2789 REG_NOTES (sinsn)
2790 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2791 REG_NOTES (sinsn));
2792 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2793 REG_NOTES (sinsn)
2794 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2795 REG_NOTES (sinsn));
2798 else if ((set = single_set (linsn)) != 0
2799 && SET_DEST (set) == parmreg)
2800 REG_NOTES (linsn)
2801 = gen_rtx_EXPR_LIST (REG_EQUIV,
2802 data->stack_parm, REG_NOTES (linsn));
2805 /* For a pointer data type, suggest a pointer register. */
2806 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2807 mark_reg_pointer (parmreg,
2808 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2811 /* A subroutine of assign_parms. Allocate stack space to hold the current
2812 parameter. Get it there. Perform all ABI specified conversions. */
2814 static void
2815 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2816 struct assign_parm_data_one *data)
2818 /* Value must be stored in the stack slot STACK_PARM during function
2819 execution. */
2820 bool to_conversion = false;
2822 if (data->promoted_mode != data->nominal_mode)
2824 /* Conversion is required. */
2825 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2827 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2829 push_to_sequence (all->conversion_insns);
2830 to_conversion = true;
2832 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2833 TYPE_UNSIGNED (TREE_TYPE (parm)));
2835 if (data->stack_parm)
2836 /* ??? This may need a big-endian conversion on sparc64. */
2837 data->stack_parm
2838 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2841 if (data->entry_parm != data->stack_parm)
2843 rtx src, dest;
2845 if (data->stack_parm == 0)
2847 data->stack_parm
2848 = assign_stack_local (GET_MODE (data->entry_parm),
2849 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2850 TYPE_ALIGN (data->passed_type));
2851 set_mem_attributes (data->stack_parm, parm, 1);
2854 dest = validize_mem (data->stack_parm);
2855 src = validize_mem (data->entry_parm);
2857 if (MEM_P (src))
2859 /* Use a block move to handle potentially misaligned entry_parm. */
2860 if (!to_conversion)
2861 push_to_sequence (all->conversion_insns);
2862 to_conversion = true;
2864 emit_block_move (dest, src,
2865 GEN_INT (int_size_in_bytes (data->passed_type)),
2866 BLOCK_OP_NORMAL);
2868 else
2869 emit_move_insn (dest, src);
2872 if (to_conversion)
2874 all->conversion_insns = get_insns ();
2875 end_sequence ();
2878 SET_DECL_RTL (parm, data->stack_parm);
2881 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2882 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2884 static void
2885 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2887 tree parm;
2888 tree orig_fnargs = all->orig_fnargs;
2890 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2892 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2893 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2895 rtx tmp, real, imag;
2896 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2898 real = DECL_RTL (fnargs);
2899 imag = DECL_RTL (TREE_CHAIN (fnargs));
2900 if (inner != GET_MODE (real))
2902 real = gen_lowpart_SUBREG (inner, real);
2903 imag = gen_lowpart_SUBREG (inner, imag);
2906 if (TREE_ADDRESSABLE (parm))
2908 rtx rmem, imem;
2909 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2911 /* split_complex_arg put the real and imag parts in
2912 pseudos. Move them to memory. */
2913 tmp = assign_stack_local (DECL_MODE (parm), size,
2914 TYPE_ALIGN (TREE_TYPE (parm)));
2915 set_mem_attributes (tmp, parm, 1);
2916 rmem = adjust_address_nv (tmp, inner, 0);
2917 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2918 push_to_sequence (all->conversion_insns);
2919 emit_move_insn (rmem, real);
2920 emit_move_insn (imem, imag);
2921 all->conversion_insns = get_insns ();
2922 end_sequence ();
2924 else
2925 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2926 SET_DECL_RTL (parm, tmp);
2928 real = DECL_INCOMING_RTL (fnargs);
2929 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2930 if (inner != GET_MODE (real))
2932 real = gen_lowpart_SUBREG (inner, real);
2933 imag = gen_lowpart_SUBREG (inner, imag);
2935 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2936 set_decl_incoming_rtl (parm, tmp);
2937 fnargs = TREE_CHAIN (fnargs);
2939 else
2941 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2942 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2944 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2945 instead of the copy of decl, i.e. FNARGS. */
2946 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2947 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2950 fnargs = TREE_CHAIN (fnargs);
2954 /* Assign RTL expressions to the function's parameters. This may involve
2955 copying them into registers and using those registers as the DECL_RTL. */
2957 static void
2958 assign_parms (tree fndecl)
2960 struct assign_parm_data_all all;
2961 tree fnargs, parm;
2963 current_function_internal_arg_pointer
2964 = targetm.calls.internal_arg_pointer ();
2966 assign_parms_initialize_all (&all);
2967 fnargs = assign_parms_augmented_arg_list (&all);
2969 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2971 struct assign_parm_data_one data;
2973 /* Extract the type of PARM; adjust it according to ABI. */
2974 assign_parm_find_data_types (&all, parm, &data);
2976 /* Early out for errors and void parameters. */
2977 if (data.passed_mode == VOIDmode)
2979 SET_DECL_RTL (parm, const0_rtx);
2980 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2981 continue;
2984 if (current_function_stdarg && !TREE_CHAIN (parm))
2985 assign_parms_setup_varargs (&all, &data, false);
2987 /* Find out where the parameter arrives in this function. */
2988 assign_parm_find_entry_rtl (&all, &data);
2990 /* Find out where stack space for this parameter might be. */
2991 if (assign_parm_is_stack_parm (&all, &data))
2993 assign_parm_find_stack_rtl (parm, &data);
2994 assign_parm_adjust_entry_rtl (&data);
2997 /* Record permanently how this parm was passed. */
2998 set_decl_incoming_rtl (parm, data.entry_parm);
3000 /* Update info on where next arg arrives in registers. */
3001 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3002 data.passed_type, data.named_arg);
3004 assign_parm_adjust_stack_rtl (&data);
3006 if (assign_parm_setup_block_p (&data))
3007 assign_parm_setup_block (&all, parm, &data);
3008 else if (data.passed_pointer || use_register_for_decl (parm))
3009 assign_parm_setup_reg (&all, parm, &data);
3010 else
3011 assign_parm_setup_stack (&all, parm, &data);
3014 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3015 assign_parms_unsplit_complex (&all, fnargs);
3017 /* Output all parameter conversion instructions (possibly including calls)
3018 now that all parameters have been copied out of hard registers. */
3019 emit_insn (all.conversion_insns);
3021 /* If we are receiving a struct value address as the first argument, set up
3022 the RTL for the function result. As this might require code to convert
3023 the transmitted address to Pmode, we do this here to ensure that possible
3024 preliminary conversions of the address have been emitted already. */
3025 if (all.function_result_decl)
3027 tree result = DECL_RESULT (current_function_decl);
3028 rtx addr = DECL_RTL (all.function_result_decl);
3029 rtx x;
3031 if (DECL_BY_REFERENCE (result))
3032 x = addr;
3033 else
3035 addr = convert_memory_address (Pmode, addr);
3036 x = gen_rtx_MEM (DECL_MODE (result), addr);
3037 set_mem_attributes (x, result, 1);
3039 SET_DECL_RTL (result, x);
3042 /* We have aligned all the args, so add space for the pretend args. */
3043 current_function_pretend_args_size = all.pretend_args_size;
3044 all.stack_args_size.constant += all.extra_pretend_bytes;
3045 current_function_args_size = all.stack_args_size.constant;
3047 /* Adjust function incoming argument size for alignment and
3048 minimum length. */
3050 #ifdef REG_PARM_STACK_SPACE
3051 current_function_args_size = MAX (current_function_args_size,
3052 REG_PARM_STACK_SPACE (fndecl));
3053 #endif
3055 current_function_args_size = CEIL_ROUND (current_function_args_size,
3056 PARM_BOUNDARY / BITS_PER_UNIT);
3058 #ifdef ARGS_GROW_DOWNWARD
3059 current_function_arg_offset_rtx
3060 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3061 : expand_expr (size_diffop (all.stack_args_size.var,
3062 size_int (-all.stack_args_size.constant)),
3063 NULL_RTX, VOIDmode, 0));
3064 #else
3065 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3066 #endif
3068 /* See how many bytes, if any, of its args a function should try to pop
3069 on return. */
3071 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3072 current_function_args_size);
3074 /* For a stdarg.h function, save info about
3075 regs and stack space used by the named args. */
3077 current_function_args_info = all.args_so_far;
3079 /* Set the rtx used for the function return value. Put this in its
3080 own variable so any optimizers that need this information don't have
3081 to include tree.h. Do this here so it gets done when an inlined
3082 function gets output. */
3084 current_function_return_rtx
3085 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3086 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3088 /* If scalar return value was computed in a pseudo-reg, or was a named
3089 return value that got dumped to the stack, copy that to the hard
3090 return register. */
3091 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3093 tree decl_result = DECL_RESULT (fndecl);
3094 rtx decl_rtl = DECL_RTL (decl_result);
3096 if (REG_P (decl_rtl)
3097 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3098 : DECL_REGISTER (decl_result))
3100 rtx real_decl_rtl;
3102 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3103 fndecl, true);
3104 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3105 /* The delay slot scheduler assumes that current_function_return_rtx
3106 holds the hard register containing the return value, not a
3107 temporary pseudo. */
3108 current_function_return_rtx = real_decl_rtl;
3113 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3114 For all seen types, gimplify their sizes. */
3116 static tree
3117 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3119 tree t = *tp;
3121 *walk_subtrees = 0;
3122 if (TYPE_P (t))
3124 if (POINTER_TYPE_P (t))
3125 *walk_subtrees = 1;
3126 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3127 && !TYPE_SIZES_GIMPLIFIED (t))
3129 gimplify_type_sizes (t, (tree *) data);
3130 *walk_subtrees = 1;
3134 return NULL;
3137 /* Gimplify the parameter list for current_function_decl. This involves
3138 evaluating SAVE_EXPRs of variable sized parameters and generating code
3139 to implement callee-copies reference parameters. Returns a list of
3140 statements to add to the beginning of the function, or NULL if nothing
3141 to do. */
3143 tree
3144 gimplify_parameters (void)
3146 struct assign_parm_data_all all;
3147 tree fnargs, parm, stmts = NULL;
3149 assign_parms_initialize_all (&all);
3150 fnargs = assign_parms_augmented_arg_list (&all);
3152 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3154 struct assign_parm_data_one data;
3156 /* Extract the type of PARM; adjust it according to ABI. */
3157 assign_parm_find_data_types (&all, parm, &data);
3159 /* Early out for errors and void parameters. */
3160 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3161 continue;
3163 /* Update info on where next arg arrives in registers. */
3164 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3165 data.passed_type, data.named_arg);
3167 /* ??? Once upon a time variable_size stuffed parameter list
3168 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3169 turned out to be less than manageable in the gimple world.
3170 Now we have to hunt them down ourselves. */
3171 walk_tree_without_duplicates (&data.passed_type,
3172 gimplify_parm_type, &stmts);
3174 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3176 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3177 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3180 if (data.passed_pointer)
3182 tree type = TREE_TYPE (data.passed_type);
3183 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3184 type, data.named_arg))
3186 tree local, t;
3188 /* For constant sized objects, this is trivial; for
3189 variable-sized objects, we have to play games. */
3190 if (TREE_CONSTANT (DECL_SIZE (parm)))
3192 local = create_tmp_var (type, get_name (parm));
3193 DECL_IGNORED_P (local) = 0;
3195 else
3197 tree ptr_type, addr, args;
3199 ptr_type = build_pointer_type (type);
3200 addr = create_tmp_var (ptr_type, get_name (parm));
3201 DECL_IGNORED_P (addr) = 0;
3202 local = build_fold_indirect_ref (addr);
3204 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3205 t = built_in_decls[BUILT_IN_ALLOCA];
3206 t = build_function_call_expr (t, args);
3207 t = fold_convert (ptr_type, t);
3208 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3209 gimplify_and_add (t, &stmts);
3212 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3213 gimplify_and_add (t, &stmts);
3215 SET_DECL_VALUE_EXPR (parm, local);
3216 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3221 return stmts;
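/* Illustrative sketch, not part of GCC: for a callee-copied, by-reference
   parameter PARM of variable size, the loop above queues statements
   equivalent to (with 'parm.addr' standing for the pointer temporary made
   by create_tmp_var)

     parm.addr  = (T *) __builtin_alloca (DECL_SIZE_UNIT (parm));
     *parm.addr = parm;                 -- copy the incoming object

   and then sets DECL_VALUE_EXPR (parm) to *parm.addr, so later references
   to PARM use the local copy.  For a constant-sized type the alloca is
   skipped and an ordinary local temporary of the type is used instead.  */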
3224 /* Indicate whether REGNO is an incoming argument to the current function
3225 that was promoted to a wider mode. If so, return the RTX for the
3226 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3227 that REGNO is promoted from and whether the promotion was signed or
3228 unsigned. */
3230 rtx
3231 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3233 tree arg;
3235 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3236 arg = TREE_CHAIN (arg))
3237 if (REG_P (DECL_INCOMING_RTL (arg))
3238 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3239 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3241 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3242 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3244 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3245 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3246 && mode != DECL_MODE (arg))
3248 *pmode = DECL_MODE (arg);
3249 *punsignedp = unsignedp;
3250 return DECL_INCOMING_RTL (arg);
3254 return 0;
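/* Illustrative example, target-dependent and assumed here: on a machine
   whose PROMOTE_MODE widens sub-word integers (as many RISC targets do),
   the parameter in

     void f (short x);

   arrives in an SImode hard register although DECL_MODE (x) is HImode.
   For that register number the function above returns the incoming rtx
   and sets *PMODE to HImode and *PUNSIGNEDP to the signedness the
   promotion used; for any other register it returns 0.  */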
3258 /* Compute the size and offset from the start of the stacked arguments for a
3259 parm passed in mode PASSED_MODE and with type TYPE.
3261 INITIAL_OFFSET_PTR points to the current offset into the stacked
3262 arguments.
3264 The starting offset and size for this parm are returned in
3265 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3266 nonzero, the offset is that of stack slot, which is returned in
3267 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3268 padding required from the initial offset ptr to the stack slot.
3270 IN_REGS is nonzero if the argument will be passed in registers. It will
3271 never be set if REG_PARM_STACK_SPACE is not defined.
3273 FNDECL is the function in which the argument was defined.
3275 There are two types of rounding that are done. The first, controlled by
3276 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3277 list to be aligned to the specific boundary (in bits). This rounding
3278 affects the initial and starting offsets, but not the argument size.
3280 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3281 optionally rounds the size of the parm to PARM_BOUNDARY. The
3282 initial offset is not affected by this rounding, while the size always
3283 is and the starting offset may be. */
3285 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3286 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3287 callers pass in the total size of args so far as
3288 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3290 void
3291 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3292 int partial, tree fndecl ATTRIBUTE_UNUSED,
3293 struct args_size *initial_offset_ptr,
3294 struct locate_and_pad_arg_data *locate)
3296 tree sizetree;
3297 enum direction where_pad;
3298 unsigned int boundary;
3299 int reg_parm_stack_space = 0;
3300 int part_size_in_regs;
3302 #ifdef REG_PARM_STACK_SPACE
3303 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3305 /* If we have found a stack parm before we reach the end of the
3306 area reserved for registers, skip that area. */
3307 if (! in_regs)
3309 if (reg_parm_stack_space > 0)
3311 if (initial_offset_ptr->var)
3313 initial_offset_ptr->var
3314 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3315 ssize_int (reg_parm_stack_space));
3316 initial_offset_ptr->constant = 0;
3318 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3319 initial_offset_ptr->constant = reg_parm_stack_space;
3322 #endif /* REG_PARM_STACK_SPACE */
3324 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3326 sizetree
3327 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3328 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3329 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3330 locate->where_pad = where_pad;
3331 locate->boundary = boundary;
3333 /* Remember if the outgoing parameter requires extra alignment on the
3334 calling function side. */
3335 if (boundary > PREFERRED_STACK_BOUNDARY)
3336 boundary = PREFERRED_STACK_BOUNDARY;
3337 if (cfun->stack_alignment_needed < boundary)
3338 cfun->stack_alignment_needed = boundary;
3340 #ifdef ARGS_GROW_DOWNWARD
3341 locate->slot_offset.constant = -initial_offset_ptr->constant;
3342 if (initial_offset_ptr->var)
3343 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3344 initial_offset_ptr->var);
3347 tree s2 = sizetree;
3348 if (where_pad != none
3349 && (!host_integerp (sizetree, 1)
3350 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3351 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3352 SUB_PARM_SIZE (locate->slot_offset, s2);
3355 locate->slot_offset.constant += part_size_in_regs;
3357 if (!in_regs
3358 #ifdef REG_PARM_STACK_SPACE
3359 || REG_PARM_STACK_SPACE (fndecl) > 0
3360 #endif
3362 pad_to_arg_alignment (&locate->slot_offset, boundary,
3363 &locate->alignment_pad);
3365 locate->size.constant = (-initial_offset_ptr->constant
3366 - locate->slot_offset.constant);
3367 if (initial_offset_ptr->var)
3368 locate->size.var = size_binop (MINUS_EXPR,
3369 size_binop (MINUS_EXPR,
3370 ssize_int (0),
3371 initial_offset_ptr->var),
3372 locate->slot_offset.var);
3374 /* Pad_below needs the pre-rounded size to know how much to pad
3375 below. */
3376 locate->offset = locate->slot_offset;
3377 if (where_pad == downward)
3378 pad_below (&locate->offset, passed_mode, sizetree);
3380 #else /* !ARGS_GROW_DOWNWARD */
3381 if (!in_regs
3382 #ifdef REG_PARM_STACK_SPACE
3383 || REG_PARM_STACK_SPACE (fndecl) > 0
3384 #endif
3386 pad_to_arg_alignment (initial_offset_ptr, boundary,
3387 &locate->alignment_pad);
3388 locate->slot_offset = *initial_offset_ptr;
3390 #ifdef PUSH_ROUNDING
3391 if (passed_mode != BLKmode)
3392 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3393 #endif
3395 /* Pad_below needs the pre-rounded size to know how much to pad below
3396 so this must be done before rounding up. */
3397 locate->offset = locate->slot_offset;
3398 if (where_pad == downward)
3399 pad_below (&locate->offset, passed_mode, sizetree);
3401 if (where_pad != none
3402 && (!host_integerp (sizetree, 1)
3403 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3404 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3406 ADD_PARM_SIZE (locate->size, sizetree);
3408 locate->size.constant -= part_size_in_regs;
3409 #endif /* ARGS_GROW_DOWNWARD */
3412 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3413 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3415 static void
3416 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3417 struct args_size *alignment_pad)
3419 tree save_var = NULL_TREE;
3420 HOST_WIDE_INT save_constant = 0;
3421 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3422 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3424 #ifdef SPARC_STACK_BOUNDARY_HACK
3425 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3426 the real alignment of %sp. However, when it does this, the
3427 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3428 if (SPARC_STACK_BOUNDARY_HACK)
3429 sp_offset = 0;
3430 #endif
3432 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3434 save_var = offset_ptr->var;
3435 save_constant = offset_ptr->constant;
3438 alignment_pad->var = NULL_TREE;
3439 alignment_pad->constant = 0;
3441 if (boundary > BITS_PER_UNIT)
3443 if (offset_ptr->var)
3445 tree sp_offset_tree = ssize_int (sp_offset);
3446 tree offset = size_binop (PLUS_EXPR,
3447 ARGS_SIZE_TREE (*offset_ptr),
3448 sp_offset_tree);
3449 #ifdef ARGS_GROW_DOWNWARD
3450 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3451 #else
3452 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3453 #endif
3455 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3456 /* ARGS_SIZE_TREE includes constant term. */
3457 offset_ptr->constant = 0;
3458 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3459 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3460 save_var);
3462 else
3464 offset_ptr->constant = -sp_offset +
3465 #ifdef ARGS_GROW_DOWNWARD
3466 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3467 #else
3468 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3469 #endif
3470 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3471 alignment_pad->constant = offset_ptr->constant - save_constant;
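/* Worked example, illustrative only: rounding a constant offset of 18
   bytes to a 128-bit (16-byte) boundary with STACK_POINTER_OFFSET = 8 and
   arguments growing upward gives

     offset_ptr->constant = -8 + CEIL_ROUND (18 + 8, 16)
                          = -8 + 32
                          = 24

   so it is offset + STACK_POINTER_OFFSET = 24 + 8 = 32 that ends up
   16-byte aligned, not the raw offset itself.  */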
3476 static void
3477 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3479 if (passed_mode != BLKmode)
3481 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3482 offset_ptr->constant
3483 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3484 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3485 - GET_MODE_SIZE (passed_mode));
3487 else
3489 if (TREE_CODE (sizetree) != INTEGER_CST
3490 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3492 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3493 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3494 /* Add it in. */
3495 ADD_PARM_SIZE (*offset_ptr, s2);
3496 SUB_PARM_SIZE (*offset_ptr, sizetree);
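/* Worked example, illustrative only: with PARM_BOUNDARY = 32 and
   BITS_PER_UNIT = 8, a QImode argument padded downward gets

     offset_ptr->constant += (8 + 31) / 32 * 32 / 8 - 1  =  4 - 1  =  3

   i.e. 3 bytes of padding below the value in its 4-byte slot.  A BLKmode
   argument of 5 bytes takes the other branch and likewise gains
   round_up (5, 4) - 5 = 3 bytes of padding.  */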
3501 /* Walk the tree of blocks describing the binding levels within a function
3502 and warn about variables that might be killed by setjmp or vfork.
3503 This is done after calling flow_analysis and before global_alloc
3504 clobbers the pseudo-regs to hard regs. */
3506 void
3507 setjmp_vars_warning (tree block)
3509 tree decl, sub;
3511 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3513 if (TREE_CODE (decl) == VAR_DECL
3514 && DECL_RTL_SET_P (decl)
3515 && REG_P (DECL_RTL (decl))
3516 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3517 warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3518 " or %<vfork%>",
3519 decl);
3522 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3523 setjmp_vars_warning (sub);
3526 /* Do the appropriate part of setjmp_vars_warning
3527 but for arguments instead of local variables. */
3529 void
3530 setjmp_args_warning (void)
3532 tree decl;
3533 for (decl = DECL_ARGUMENTS (current_function_decl);
3534 decl; decl = TREE_CHAIN (decl))
3535 if (DECL_RTL (decl) != 0
3536 && REG_P (DECL_RTL (decl))
3537 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3538 warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3539 decl);
3543 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3544 and create duplicate blocks. */
3545 /* ??? Need an option to either create block fragments or to create
3546 abstract origin duplicates of a source block. It really depends
3547 on what optimization has been performed. */
3549 void
3550 reorder_blocks (void)
3552 tree block = DECL_INITIAL (current_function_decl);
3553 VEC(tree,heap) *block_stack;
3555 if (block == NULL_TREE)
3556 return;
3558 block_stack = VEC_alloc (tree, heap, 10);
3560 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3561 clear_block_marks (block);
3563 /* Prune the old trees away, so that they don't get in the way. */
3564 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3565 BLOCK_CHAIN (block) = NULL_TREE;
3567 /* Recreate the block tree from the note nesting. */
3568 reorder_blocks_1 (get_insns (), block, &block_stack);
3569 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3571 /* Remove deleted blocks from the block fragment chains. */
3572 reorder_fix_fragments (block);
3574 VEC_free (tree, heap, block_stack);
3577 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3579 void
3580 clear_block_marks (tree block)
3582 while (block)
3584 TREE_ASM_WRITTEN (block) = 0;
3585 clear_block_marks (BLOCK_SUBBLOCKS (block));
3586 block = BLOCK_CHAIN (block);
3590 static void
3591 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3593 rtx insn;
3595 for (insn = insns; insn; insn = NEXT_INSN (insn))
3597 if (NOTE_P (insn))
3599 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3601 tree block = NOTE_BLOCK (insn);
3603 /* If we have seen this block before, that means it now
3604 spans multiple address regions. Create a new fragment. */
3605 if (TREE_ASM_WRITTEN (block))
3607 tree new_block = copy_node (block);
3608 tree origin;
3610 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3611 ? BLOCK_FRAGMENT_ORIGIN (block)
3612 : block);
3613 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3614 BLOCK_FRAGMENT_CHAIN (new_block)
3615 = BLOCK_FRAGMENT_CHAIN (origin);
3616 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3618 NOTE_BLOCK (insn) = new_block;
3619 block = new_block;
3622 BLOCK_SUBBLOCKS (block) = 0;
3623 TREE_ASM_WRITTEN (block) = 1;
3624 /* When there's only one block for the entire function,
3625 current_block == block and we mustn't do this, it
3626 will cause infinite recursion. */
3627 if (block != current_block)
3629 BLOCK_SUPERCONTEXT (block) = current_block;
3630 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3631 BLOCK_SUBBLOCKS (current_block) = block;
3632 current_block = block;
3634 VEC_safe_push (tree, heap, *p_block_stack, block);
3636 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3638 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3639 BLOCK_SUBBLOCKS (current_block)
3640 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3641 current_block = BLOCK_SUPERCONTEXT (current_block);
3647 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3648 appears in the block tree, select one of the fragments to become
3649 the new origin block. */
3651 static void
3652 reorder_fix_fragments (tree block)
3654 while (block)
3656 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3657 tree new_origin = NULL_TREE;
3659 if (dup_origin)
3661 if (! TREE_ASM_WRITTEN (dup_origin))
3663 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3665 /* Find the first of the remaining fragments. There must
3666 be at least one -- the current block. */
3667 while (! TREE_ASM_WRITTEN (new_origin))
3668 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3669 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3672 else if (! dup_origin)
3673 new_origin = block;
3675 /* Re-root the rest of the fragments to the new origin. In the
3676 case that DUP_ORIGIN was null, that means BLOCK was the origin
3677 of a chain of fragments and we want to remove those fragments
3678 that didn't make it to the output. */
3679 if (new_origin)
3681 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3682 tree chain = *pp;
3684 while (chain)
3686 if (TREE_ASM_WRITTEN (chain))
3688 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3689 *pp = chain;
3690 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3692 chain = BLOCK_FRAGMENT_CHAIN (chain);
3694 *pp = NULL_TREE;
3697 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3698 block = BLOCK_CHAIN (block);
3702 /* Reverse the order of elements in the chain T of blocks,
3703 and return the new head of the chain (old last element). */
3705 tree
3706 blocks_nreverse (tree t)
3708 tree prev = 0, decl, next;
3709 for (decl = t; decl; decl = next)
3711 next = BLOCK_CHAIN (decl);
3712 BLOCK_CHAIN (decl) = prev;
3713 prev = decl;
3715 return prev;
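/* Worked example, for illustration: given a chain A -> B -> C linked
   through BLOCK_CHAIN, blocks_nreverse rewrites the links in place and
   returns C, so the chain becomes C -> B -> A.  */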
3718 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3719 non-NULL, list them all into VECTOR, in a depth-first preorder
3720 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3721 blocks. */
3723 static int
3724 all_blocks (tree block, tree *vector)
3726 int n_blocks = 0;
3728 while (block)
3730 TREE_ASM_WRITTEN (block) = 0;
3732 /* Record this block. */
3733 if (vector)
3734 vector[n_blocks] = block;
3736 ++n_blocks;
3738 /* Record the subblocks, and their subblocks... */
3739 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3740 vector ? vector + n_blocks : 0);
3741 block = BLOCK_CHAIN (block);
3744 return n_blocks;
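/* Usage note: all_blocks is normally called twice, as in
   get_block_vector below -- once with VECTOR == NULL merely to count
   the blocks, and once with a freshly allocated array to fill in.  */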
3747 /* Return a vector containing all the blocks rooted at BLOCK. The
3748 number of elements in the vector is stored in N_BLOCKS_P. The
3749 vector is dynamically allocated; it is the caller's responsibility
3750 to call `free' on the pointer returned. */
3752 static tree *
3753 get_block_vector (tree block, int *n_blocks_p)
3755 tree *block_vector;
3757 *n_blocks_p = all_blocks (block, NULL);
3758 block_vector = XNEWVEC (tree, *n_blocks_p);
3759 all_blocks (block, block_vector);
3761 return block_vector;
3764 static GTY(()) int next_block_index = 2;
3766 /* Set BLOCK_NUMBER for all the blocks in FN. */
3768 void
3769 number_blocks (tree fn)
3771 int i;
3772 int n_blocks;
3773 tree *block_vector;
3775 /* For SDB and XCOFF debugging output, we start numbering the blocks
3776 from 1 within each function, rather than keeping a running
3777 count. */
3778 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3779 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3780 next_block_index = 1;
3781 #endif
3783 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3785 /* The top-level BLOCK isn't numbered at all. */
3786 for (i = 1; i < n_blocks; ++i)
3787 /* We number the blocks from two. */
3788 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3790 free (block_vector);
3792 return;
3795 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3797 tree
3798 debug_find_var_in_block_tree (tree var, tree block)
3800 tree t;
3802 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3803 if (t == var)
3804 return block;
3806 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3808 tree ret = debug_find_var_in_block_tree (var, t);
3809 if (ret)
3810 return ret;
3813 return NULL_TREE;
3816 /* Allocate a function structure for FNDECL and set its contents
3817 to the defaults. */
3819 void
3820 allocate_struct_function (tree fndecl)
3822 tree result;
3823 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3825 cfun = ggc_alloc_cleared (sizeof (struct function));
3827 cfun->stack_alignment_needed = STACK_BOUNDARY;
3828 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3830 current_function_funcdef_no = funcdef_no++;
3832 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3834 init_eh_for_function ();
3836 lang_hooks.function.init (cfun);
3837 if (init_machine_status)
3838 cfun->machine = (*init_machine_status) ();
3840 if (fndecl == NULL)
3841 return;
3843 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3844 cfun->decl = fndecl;
3846 result = DECL_RESULT (fndecl);
3847 if (aggregate_value_p (result, fndecl))
3849 #ifdef PCC_STATIC_STRUCT_RETURN
3850 current_function_returns_pcc_struct = 1;
3851 #endif
3852 current_function_returns_struct = 1;
3855 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3857 current_function_stdarg
3858 = (fntype
3859 && TYPE_ARG_TYPES (fntype) != 0
3860 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3861 != void_type_node));
3863 /* Assume all registers in stdarg functions need to be saved. */
3864 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3865 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3868 /* Reset cfun, and other non-struct-function variables to defaults as
3869 appropriate for emitting rtl at the start of a function. */
3871 static void
3872 prepare_function_start (tree fndecl)
3874 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3875 cfun = DECL_STRUCT_FUNCTION (fndecl);
3876 else
3877 allocate_struct_function (fndecl);
3878 init_emit ();
3879 init_varasm_status (cfun);
3880 init_expr ();
3882 cse_not_expected = ! optimize;
3884 /* Caller save not needed yet. */
3885 caller_save_needed = 0;
3887 /* We haven't done register allocation yet. */
3888 reg_renumber = 0;
3890 /* Indicate that we have not instantiated virtual registers yet. */
3891 virtuals_instantiated = 0;
3893 /* Indicate that we want CONCATs now. */
3894 generating_concat_p = 1;
3896 /* Indicate we have no need of a frame pointer yet. */
3897 frame_pointer_needed = 0;
3900 /* Initialize the rtl expansion mechanism so that we can do simple things
3901 like generate sequences. This is used to provide a context during global
3902 initialization of some passes. */
3903 void
3904 init_dummy_function_start (void)
3906 prepare_function_start (NULL);
3909 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3910 and initialize static variables for generating RTL for the statements
3911 of the function. */
3913 void
3914 init_function_start (tree subr)
3916 prepare_function_start (subr);
3918 /* Prevent ever trying to delete the first instruction of a
3919 function. Also tell final how to output a linenum before the
3920 function prologue. Note linenums could be missing, e.g. when
3921 compiling a Java .class file. */
3922 if (! DECL_IS_BUILTIN (subr))
3923 emit_line_note (DECL_SOURCE_LOCATION (subr));
3925 /* Make sure first insn is a note even if we don't want linenums.
3926 This makes sure the first insn will never be deleted.
3927 Also, final expects a note to appear there. */
3928 emit_note (NOTE_INSN_DELETED);
3930 /* Warn if this value is an aggregate type,
3931 regardless of which calling convention we are using for it. */
3932 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3933 warning (OPT_Waggregate_return, "function returns an aggregate");
3936 /* Make sure all values used by the optimization passes have sane
3937 defaults. */
3938 unsigned int
3939 init_function_for_compilation (void)
3941 reg_renumber = 0;
3943 /* No prologue/epilogue insns yet. Make sure that these vectors are
3944 empty. */
3945 gcc_assert (VEC_length (int, prologue) == 0);
3946 gcc_assert (VEC_length (int, epilogue) == 0);
3947 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3948 return 0;
3951 struct tree_opt_pass pass_init_function =
3953 NULL, /* name */
3954 NULL, /* gate */
3955 init_function_for_compilation, /* execute */
3956 NULL, /* sub */
3957 NULL, /* next */
3958 0, /* static_pass_number */
3959 0, /* tv_id */
3960 0, /* properties_required */
3961 0, /* properties_provided */
3962 0, /* properties_destroyed */
3963 0, /* todo_flags_start */
3964 0, /* todo_flags_finish */
3965 0 /* letter */
3969 void
3970 expand_main_function (void)
3972 #if (defined(INVOKE__main) \
3973 || (!defined(HAS_INIT_SECTION) \
3974 && !defined(INIT_SECTION_ASM_OP) \
3975 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3976 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3977 #endif
3980 /* Expand code to initialize the stack_protect_guard. This is invoked at
3981 the beginning of a function to be protected. */
3983 #ifndef HAVE_stack_protect_set
3984 # define HAVE_stack_protect_set 0
3985 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
3986 #endif
3988 void
3989 stack_protect_prologue (void)
3991 tree guard_decl = targetm.stack_protect_guard ();
3992 rtx x, y;
3994 /* Avoid expand_expr here, because we don't want guard_decl pulled
3995 into registers unless absolutely necessary. And we know that
3996 cfun->stack_protect_guard is a local stack slot, so this skips
3997 all the fluff. */
3998 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3999 y = validize_mem (DECL_RTL (guard_decl));
4001 /* Allow the target to copy from Y to X without leaking Y into a
4002 register. */
4003 if (HAVE_stack_protect_set)
4005 rtx insn = gen_stack_protect_set (x, y);
4006 if (insn)
4008 emit_insn (insn);
4009 return;
4013 /* Otherwise do a straight move. */
4014 emit_move_insn (x, y);
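/* Roughly speaking (illustration only), the RTL emitted above
   corresponds to the C statement

       frame_guard = __stack_chk_guard;

   where frame_guard stands for cfun->stack_protect_guard and
   __stack_chk_guard for whatever guard variable the target's
   stack_protect_guard hook returns.  */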
4017 /* Expand code to verify the stack_protect_guard. This is invoked at
4018 the end of a function to be protected. */
4020 #ifndef HAVE_stack_protect_test
4021 # define HAVE_stack_protect_test 0
4022 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4023 #endif
4025 void
4026 stack_protect_epilogue (void)
4028 tree guard_decl = targetm.stack_protect_guard ();
4029 rtx label = gen_label_rtx ();
4030 rtx x, y, tmp;
4032 /* Avoid expand_expr here, because we don't want guard_decl pulled
4033 into registers unless absolutely necessary. And we know that
4034 cfun->stack_protect_guard is a local stack slot, so this skips
4035 all the fluff. */
4036 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4037 y = validize_mem (DECL_RTL (guard_decl));
4039 /* Allow the target to compare Y with X without leaking either into
4040 a register. */
4041 switch (HAVE_stack_protect_test != 0)
4043 case 1:
4044 tmp = gen_stack_protect_test (x, y, label);
4045 if (tmp)
4047 emit_insn (tmp);
4048 break;
4050 /* FALLTHRU */
4052 default:
4053 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4054 break;
4057 /* The noreturn predictor has been moved to the tree level. The rtl-level
4058 predictors estimate this branch about 20%, which isn't enough to get
4059 things moved out of line. Since this is the only extant case of adding
4060 a noreturn function at the rtl level, it doesn't seem worth doing anything
4061 except adding the prediction by hand. */
4062 tmp = get_last_insn ();
4063 if (JUMP_P (tmp))
4064 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4066 expand_expr_stmt (targetm.stack_protect_fail ());
4067 emit_label (label);
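/* Roughly speaking (illustration only), the code emitted above behaves
   like

       if (frame_guard != __stack_chk_guard)
         __stack_chk_fail ();

   with the comparison done via the stack_protect_test pattern when the
   target provides one, and via an ordinary compare-and-jump otherwise;
   the failure call is whatever targetm.stack_protect_fail expands to.  */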
4070 /* Start the RTL for a new function, and set variables used for
4071 emitting RTL.
4072 SUBR is the FUNCTION_DECL node.
4073 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4074 the function's parameters, which must be run at any return statement. */
4076 void
4077 expand_function_start (tree subr)
4079 /* Make sure volatile mem refs aren't considered
4080 valid operands of arithmetic insns. */
4081 init_recog_no_volatile ();
4083 current_function_profile
4084 = (profile_flag
4085 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4087 current_function_limit_stack
4088 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4090 /* Make the label for return statements to jump to. Do not special
4091 case machines with special return instructions -- they will be
4092 handled later during jump, ifcvt, or epilogue creation. */
4093 return_label = gen_label_rtx ();
4095 /* Initialize rtx used to return the value. */
4096 /* Do this before assign_parms so that we copy the struct value address
4097 before any library calls that assign parms might generate. */
4099 /* Decide whether to return the value in memory or in a register. */
4100 if (aggregate_value_p (DECL_RESULT (subr), subr))
4102 /* Returning something that won't go in a register. */
4103 rtx value_address = 0;
4105 #ifdef PCC_STATIC_STRUCT_RETURN
4106 if (current_function_returns_pcc_struct)
4108 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4109 value_address = assemble_static_space (size);
4111 else
4112 #endif
4114 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
4115 /* Expect to be passed the address of a place to store the value.
4116 If it is passed as an argument, assign_parms will take care of
4117 it. */
4118 if (sv)
4120 value_address = gen_reg_rtx (Pmode);
4121 emit_move_insn (value_address, sv);
4124 if (value_address)
4126 rtx x = value_address;
4127 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4129 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4130 set_mem_attributes (x, DECL_RESULT (subr), 1);
4132 SET_DECL_RTL (DECL_RESULT (subr), x);
4135 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4136 /* If return mode is void, this decl rtl should not be used. */
4137 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4138 else
4140 /* Compute the return values into a pseudo reg, which we will copy
4141 into the true return register after the cleanups are done. */
4142 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4143 if (TYPE_MODE (return_type) != BLKmode
4144 && targetm.calls.return_in_msb (return_type))
4145 /* expand_function_end will insert the appropriate padding in
4146 this case. Use the return value's natural (unpadded) mode
4147 within the function proper. */
4148 SET_DECL_RTL (DECL_RESULT (subr),
4149 gen_reg_rtx (TYPE_MODE (return_type)));
4150 else
4152 /* In order to figure out what mode to use for the pseudo, we
4153 figure out what the mode of the eventual return register will
4154 actually be, and use that. */
4155 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4157 /* Structures that are returned in registers are not
4158 aggregate_value_p, so we may see a PARALLEL or a REG. */
4159 if (REG_P (hard_reg))
4160 SET_DECL_RTL (DECL_RESULT (subr),
4161 gen_reg_rtx (GET_MODE (hard_reg)));
4162 else
4164 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4165 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4169 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4170 result to the real return register(s). */
4171 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4174 /* Initialize rtx for parameters and local variables.
4175 In some cases this requires emitting insns. */
4176 assign_parms (subr);
4178 /* If function gets a static chain arg, store it. */
4179 if (cfun->static_chain_decl)
4181 tree parm = cfun->static_chain_decl;
4182 rtx local = gen_reg_rtx (Pmode);
4184 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4185 SET_DECL_RTL (parm, local);
4186 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4188 emit_move_insn (local, static_chain_incoming_rtx);
4191 /* If the function receives a non-local goto, then store the
4192 bits we need to restore the frame pointer. */
4193 if (cfun->nonlocal_goto_save_area)
4195 tree t_save;
4196 rtx r_save;
4198 /* ??? We need to do this save early. Unfortunately, this point is
4199 before the frame variable gets declared. Help out... */
4200 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4202 t_save = build4 (ARRAY_REF, ptr_type_node,
4203 cfun->nonlocal_goto_save_area,
4204 integer_zero_node, NULL_TREE, NULL_TREE);
4205 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4206 r_save = convert_memory_address (Pmode, r_save);
4208 emit_move_insn (r_save, virtual_stack_vars_rtx);
4209 update_nonlocal_goto_save_area ();
4212 /* The following was moved from init_function_start.
4213 The move is supposed to make sdb output more accurate. */
4214 /* Indicate the beginning of the function body,
4215 as opposed to parm setup. */
4216 emit_note (NOTE_INSN_FUNCTION_BEG);
4218 if (!NOTE_P (get_last_insn ()))
4219 emit_note (NOTE_INSN_DELETED);
4220 parm_birth_insn = get_last_insn ();
4222 if (current_function_profile)
4224 #ifdef PROFILE_HOOK
4225 PROFILE_HOOK (current_function_funcdef_no);
4226 #endif
4229 /* After the display initializations is where the tail-recursion label
4230 should go, if we end up needing one. Ensure we have a NOTE here
4231 since some things (like trampolines) get placed before this. */
4232 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4234 /* Make sure there is a line number after the function entry setup code. */
4235 force_next_line_note ();
4238 /* Undo the effects of init_dummy_function_start. */
4239 void
4240 expand_dummy_function_end (void)
4242 /* End any sequences that failed to be closed due to syntax errors. */
4243 while (in_sequence_p ())
4244 end_sequence ();
4246 /* Outside function body, can't compute type's actual size
4247 until next function's body starts. */
4249 free_after_parsing (cfun);
4250 free_after_compilation (cfun);
4251 cfun = 0;
4254 /* Call DOIT for each hard register used as a return value from
4255 the current function. */
4257 void
4258 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4260 rtx outgoing = current_function_return_rtx;
4262 if (! outgoing)
4263 return;
4265 if (REG_P (outgoing))
4266 (*doit) (outgoing, arg);
4267 else if (GET_CODE (outgoing) == PARALLEL)
4269 int i;
4271 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4273 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4275 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4276 (*doit) (x, arg);
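/* Usage note: the two small callbacks below, do_clobber_return_reg and
   do_use_return_reg, are the only DOIT functions passed to
   diddle_return_value in this file; they emit a CLOBBER or a USE for
   every hard register carrying the return value, including each piece
   of a PARALLEL return.  */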
4281 static void
4282 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4284 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4287 void
4288 clobber_return_register (void)
4290 diddle_return_value (do_clobber_return_reg, NULL);
4292 /* In case we do use pseudo to return value, clobber it too. */
4293 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4295 tree decl_result = DECL_RESULT (current_function_decl);
4296 rtx decl_rtl = DECL_RTL (decl_result);
4297 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4299 do_clobber_return_reg (decl_rtl, NULL);
4304 static void
4305 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4307 emit_insn (gen_rtx_USE (VOIDmode, reg));
4310 static void
4311 use_return_register (void)
4313 diddle_return_value (do_use_return_reg, NULL);
4316 /* Possibly warn about unused parameters. */
4317 void
4318 do_warn_unused_parameter (tree fn)
4320 tree decl;
4322 for (decl = DECL_ARGUMENTS (fn);
4323 decl; decl = TREE_CHAIN (decl))
4324 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4325 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4326 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4329 static GTY(()) rtx initial_trampoline;
4331 /* Generate RTL for the end of the current function. */
4333 void
4334 expand_function_end (void)
4336 rtx clobber_after;
4338 /* If arg_pointer_save_area was referenced only from a nested
4339 function, we will not have initialized it yet. Do that now. */
4340 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4341 get_arg_pointer_save_area (cfun);
4343 /* If we are doing stack checking and this function makes calls,
4344 do a stack probe at the start of the function to ensure we have enough
4345 space for another stack frame. */
4346 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4348 rtx insn, seq;
4350 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4351 if (CALL_P (insn))
4353 start_sequence ();
4354 probe_stack_range (STACK_CHECK_PROTECT,
4355 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4356 seq = get_insns ();
4357 end_sequence ();
4358 emit_insn_before (seq, tail_recursion_reentry);
4359 break;
4363 /* Possibly warn about unused parameters.
4364 When frontend does unit-at-a-time, the warning is already
4365 issued at finalization time. */
4366 if (warn_unused_parameter
4367 && !lang_hooks.callgraph.expand_function)
4368 do_warn_unused_parameter (current_function_decl);
4370 /* End any sequences that failed to be closed due to syntax errors. */
4371 while (in_sequence_p ())
4372 end_sequence ();
4374 clear_pending_stack_adjust ();
4375 do_pending_stack_adjust ();
4377 /* Mark the end of the function body.
4378 If control reaches this insn, the function can drop through
4379 without returning a value. */
4380 emit_note (NOTE_INSN_FUNCTION_END);
4382 /* Must mark the last line number note in the function, so that the test
4383 coverage code can avoid counting the last line twice. This just tells
4384 the code to ignore the immediately following line note, since there
4385 already exists a copy of this note somewhere above. This line number
4386 note is still needed for debugging though, so we can't delete it. */
4387 if (flag_test_coverage)
4388 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4390 /* Output a line number for the end of the function.
4391 SDB depends on this. */
4392 force_next_line_note ();
4393 emit_line_note (input_location);
4395 /* Before the return label (if any), clobber the return
4396 registers so that they are not propagated live to the rest of
4397 the function. This can only happen with functions that drop
4398 through; if there had been a return statement, there would
4399 have either been a return rtx, or a jump to the return label.
4401 We delay actual code generation until after the current_function_value_rtx
4402 is computed. */
4403 clobber_after = get_last_insn ();
4405 /* Output the label for the actual return from the function. */
4406 emit_label (return_label);
4408 if (USING_SJLJ_EXCEPTIONS)
4410 /* Let except.c know where it should emit the call to unregister
4411 the function context for sjlj exceptions. */
4412 if (flag_exceptions)
4413 sjlj_emit_function_exit_after (get_last_insn ());
4415 else
4417 /* @@@ This is a kludge. We want to ensure that instructions that
4418 may trap are not moved into the epilogue by scheduling, because
4419 we don't always emit unwind information for the epilogue.
4420 However, not all machine descriptions define a blockage insn, so
4421 emit an ASM_INPUT to act as one. */
4422 if (flag_non_call_exceptions)
4423 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4426 /* If this is an implementation of throw, do what's necessary to
4427 communicate between __builtin_eh_return and the epilogue. */
4428 expand_eh_return ();
4430 /* If scalar return value was computed in a pseudo-reg, or was a named
4431 return value that got dumped to the stack, copy that to the hard
4432 return register. */
4433 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4435 tree decl_result = DECL_RESULT (current_function_decl);
4436 rtx decl_rtl = DECL_RTL (decl_result);
4438 if (REG_P (decl_rtl)
4439 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4440 : DECL_REGISTER (decl_result))
4442 rtx real_decl_rtl = current_function_return_rtx;
4444 /* This should be set in assign_parms. */
4445 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4447 /* If this is a BLKmode structure being returned in registers,
4448 then use the mode computed in expand_return. Note that if
4449 decl_rtl is memory, then its mode may have been changed,
4450 but that current_function_return_rtx has not. */
4451 if (GET_MODE (real_decl_rtl) == BLKmode)
4452 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4454 /* If a non-BLKmode return value should be padded at the least
4455 significant end of the register, shift it left by the appropriate
4456 amount. BLKmode results are handled using the group load/store
4457 machinery. */
4458 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4459 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4461 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4462 REGNO (real_decl_rtl)),
4463 decl_rtl);
4464 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4466 /* If a named return value dumped decl_result to memory, then
4467 we may need to re-do the PROMOTE_MODE signed/unsigned
4468 extension. */
4469 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4471 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4473 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4474 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4475 &unsignedp, 1);
4477 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4479 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4481 /* If expand_function_start has created a PARALLEL for decl_rtl,
4482 move the result to the real return registers. Otherwise, do
4483 a group load from decl_rtl for a named return. */
4484 if (GET_CODE (decl_rtl) == PARALLEL)
4485 emit_group_move (real_decl_rtl, decl_rtl);
4486 else
4487 emit_group_load (real_decl_rtl, decl_rtl,
4488 TREE_TYPE (decl_result),
4489 int_size_in_bytes (TREE_TYPE (decl_result)));
4491 /* In the case of complex integer modes smaller than a word, we'll
4492 need to generate some non-trivial bitfield insertions. Do that
4493 on a pseudo and not the hard register. */
4494 else if (GET_CODE (decl_rtl) == CONCAT
4495 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4496 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4498 int old_generating_concat_p;
4499 rtx tmp;
4501 old_generating_concat_p = generating_concat_p;
4502 generating_concat_p = 0;
4503 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4504 generating_concat_p = old_generating_concat_p;
4506 emit_move_insn (tmp, decl_rtl);
4507 emit_move_insn (real_decl_rtl, tmp);
4509 else
4510 emit_move_insn (real_decl_rtl, decl_rtl);
4514 /* If returning a structure, arrange to return the address of the value
4515 in a place where debuggers expect to find it.
4517 If returning a structure PCC style,
4518 the caller also depends on this value.
4519 And current_function_returns_pcc_struct is not necessarily set. */
4520 if (current_function_returns_struct
4521 || current_function_returns_pcc_struct)
4523 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4524 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4525 rtx outgoing;
4527 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4528 type = TREE_TYPE (type);
4529 else
4530 value_address = XEXP (value_address, 0);
4532 outgoing = targetm.calls.function_value (build_pointer_type (type),
4533 current_function_decl, true);
4535 /* Mark this as a function return value so integrate will delete the
4536 assignment and USE below when inlining this function. */
4537 REG_FUNCTION_VALUE_P (outgoing) = 1;
4539 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4540 value_address = convert_memory_address (GET_MODE (outgoing),
4541 value_address);
4543 emit_move_insn (outgoing, value_address);
4545 /* Show the return register used to hold the result (in this case the address
4546 of the result). */
4547 current_function_return_rtx = outgoing;
4550 /* Emit the actual code to clobber return register. */
4552 rtx seq;
4554 start_sequence ();
4555 clobber_return_register ();
4556 expand_naked_return ();
4557 seq = get_insns ();
4558 end_sequence ();
4560 emit_insn_after (seq, clobber_after);
4563 /* Output the label for the naked return from the function. */
4564 emit_label (naked_return_label);
4566 /* If stack protection is enabled for this function, check the guard. */
4567 if (cfun->stack_protect_guard)
4568 stack_protect_epilogue ();
4570 /* If we had calls to alloca, and this machine needs
4571 an accurate stack pointer to exit the function,
4572 insert some code to save and restore the stack pointer. */
4573 if (! EXIT_IGNORE_STACK
4574 && current_function_calls_alloca)
4576 rtx tem = 0;
4578 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4579 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4582 /* ??? This should no longer be necessary since stupid is no longer with
4583 us, but there are some parts of the compiler (e.g. reload_combine and
4584 sh mach_dep_reorg) that still try to compute their own lifetime info
4585 instead of using the general framework. */
4586 use_return_register ();
4590 get_arg_pointer_save_area (struct function *f)
4592 rtx ret = f->x_arg_pointer_save_area;
4594 if (! ret)
4596 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4597 f->x_arg_pointer_save_area = ret;
4600 if (f == cfun && ! f->arg_pointer_save_area_init)
4602 rtx seq;
4604 /* Save the arg pointer at the beginning of the function. The
4605 generated stack slot may not be a valid memory address, so we
4606 have to check it and fix it if necessary. */
4607 start_sequence ();
4608 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4609 seq = get_insns ();
4610 end_sequence ();
4612 push_topmost_sequence ();
4613 emit_insn_after (seq, entry_of_function ());
4614 pop_topmost_sequence ();
4617 return ret;
4620 /* Extend a vector that records the INSN_UIDs of INSNS
4621 (a list of one or more insns). */
4623 static void
4624 record_insns (rtx insns, VEC(int,heap) **vecp)
4626 rtx tmp;
4628 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4629 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4632 /* Set the locator of the insn chain starting at INSN to LOC. */
4633 static void
4634 set_insn_locators (rtx insn, int loc)
4636 while (insn != NULL_RTX)
4638 if (INSN_P (insn))
4639 INSN_LOCATOR (insn) = loc;
4640 insn = NEXT_INSN (insn);
4644 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4645 be running after reorg, SEQUENCE rtl is possible. */
4647 static int
4648 contains (rtx insn, VEC(int,heap) **vec)
4650 int i, j;
4652 if (NONJUMP_INSN_P (insn)
4653 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4655 int count = 0;
4656 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4657 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4658 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4659 == VEC_index (int, *vec, j))
4660 count++;
4661 return count;
4663 else
4665 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4666 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4667 return 1;
4669 return 0;
4673 prologue_epilogue_contains (rtx insn)
4675 if (contains (insn, &prologue))
4676 return 1;
4677 if (contains (insn, &epilogue))
4678 return 1;
4679 return 0;
4683 sibcall_epilogue_contains (rtx insn)
4685 if (sibcall_epilogue)
4686 return contains (insn, &sibcall_epilogue);
4687 return 0;
4690 #ifdef HAVE_return
4691 /* Insert gen_return at the end of block BB. This also means updating
4692 block_for_insn appropriately. */
4694 static void
4695 emit_return_into_block (basic_block bb, rtx line_note)
4697 emit_jump_insn_after (gen_return (), BB_END (bb));
4698 if (line_note)
4699 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4701 #endif /* HAVE_return */
4703 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4705 /* These functions convert the epilogue into a variant that does not
4706 modify the stack pointer. This is used in cases where a function
4707 returns an object whose size is not known until it is computed.
4708 The called function leaves the object on the stack, leaves the
4709 stack depressed, and returns a pointer to the object.
4711 What we need to do is track all modifications and references to the
4712 stack pointer, deleting the modifications and changing the
4713 references to point to the location the stack pointer would have
4714 pointed to had the modifications taken place.
4716 These functions need to be portable so we need to make as few
4717 assumptions about the epilogue as we can. However, the epilogue
4718 basically contains three things: instructions to reset the stack
4719 pointer, instructions to reload registers, possibly including the
4720 frame pointer, and an instruction to return to the caller.
4722 We must be sure of what a relevant epilogue insn is doing. We also
4723 make no attempt to validate the insns we make since if they are
4724 invalid, we probably can't do anything valid. The intent is that
4725 these routines get "smarter" as more and more machines start to use
4726 them and they try operating on different epilogues.
4728 We use the following structure to track what the part of the
4729 epilogue that we've already processed has done. We keep two copies
4730 of the SP equivalence, one for use during the insn we are
4731 processing and one for use in the next insn. The difference is
4732 because one part of a PARALLEL may adjust SP and the other may use
4733 it. */
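/* Illustrative example: if the epilogue contains

       (set (reg sp) (plus (reg sp) (const_int 16)))

   that adjustment is not emitted; instead it is recorded as
   sp_equiv_reg = sp, sp_offset = 16, and every later reference to the
   stack pointer (including a return address located on the stack) is
   rewritten as an offset of 16 from the still-unadjusted stack
   pointer register.  */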
4735 struct epi_info
4737 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4738 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4739 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4740 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4741 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4742 should be set to once we no longer need
4743 its value. */
4744 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4745 for registers. */
4748 static void handle_epilogue_set (rtx, struct epi_info *);
4749 static void update_epilogue_consts (rtx, rtx, void *);
4750 static void emit_equiv_load (struct epi_info *);
4752 /* Modify INSNS, a list of one or more insns that is part of the epilogue, so
4753 that it makes no modifications to the stack pointer. Return the new list of insns. */
4755 static rtx
4756 keep_stack_depressed (rtx insns)
4758 int j;
4759 struct epi_info info;
4760 rtx insn, next;
4762 /* If the epilogue is just a single instruction, it must be OK as is. */
4763 if (NEXT_INSN (insns) == NULL_RTX)
4764 return insns;
4766 /* Otherwise, start a sequence, initialize the information we have, and
4767 process all the insns we were given. */
4768 start_sequence ();
4770 info.sp_equiv_reg = stack_pointer_rtx;
4771 info.sp_offset = 0;
4772 info.equiv_reg_src = 0;
4774 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4775 info.const_equiv[j] = 0;
4777 insn = insns;
4778 next = NULL_RTX;
4779 while (insn != NULL_RTX)
4781 next = NEXT_INSN (insn);
4783 if (!INSN_P (insn))
4785 add_insn (insn);
4786 insn = next;
4787 continue;
4790 /* If this insn references the register that SP is equivalent to and
4791 we have a pending load to that register, we must force out the load
4792 first and then indicate we no longer know what SP's equivalent is. */
4793 if (info.equiv_reg_src != 0
4794 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4796 emit_equiv_load (&info);
4797 info.sp_equiv_reg = 0;
4800 info.new_sp_equiv_reg = info.sp_equiv_reg;
4801 info.new_sp_offset = info.sp_offset;
4803 /* If this is a (RETURN) and the return address is on the stack,
4804 update the address and change to an indirect jump. */
4805 if (GET_CODE (PATTERN (insn)) == RETURN
4806 || (GET_CODE (PATTERN (insn)) == PARALLEL
4807 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4809 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4810 rtx base = 0;
4811 HOST_WIDE_INT offset = 0;
4812 rtx jump_insn, jump_set;
4814 /* If the return address is in a register, we can emit the insn
4815 unchanged. Otherwise, it must be a MEM and we see what the
4816 base register and offset are. In any case, we have to emit any
4817 pending load to the equivalent reg of SP, if any. */
4818 if (REG_P (retaddr))
4820 emit_equiv_load (&info);
4821 add_insn (insn);
4822 insn = next;
4823 continue;
4825 else
4827 rtx ret_ptr;
4828 gcc_assert (MEM_P (retaddr));
4830 ret_ptr = XEXP (retaddr, 0);
4832 if (REG_P (ret_ptr))
4834 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4835 offset = 0;
4837 else
4839 gcc_assert (GET_CODE (ret_ptr) == PLUS
4840 && REG_P (XEXP (ret_ptr, 0))
4841 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4842 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4843 offset = INTVAL (XEXP (ret_ptr, 1));
4847 /* If the base of the location containing the return pointer
4848 is SP, we must update it with the replacement address. Otherwise,
4849 just build the necessary MEM. */
4850 retaddr = plus_constant (base, offset);
4851 if (base == stack_pointer_rtx)
4852 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4853 plus_constant (info.sp_equiv_reg,
4854 info.sp_offset));
4856 retaddr = gen_rtx_MEM (Pmode, retaddr);
4857 MEM_NOTRAP_P (retaddr) = 1;
4859 /* If there is a pending load to the equivalent register for SP
4860 and we reference that register, we must load our address into
4861 a scratch register and then do that load. */
4862 if (info.equiv_reg_src
4863 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4865 unsigned int regno;
4866 rtx reg;
4868 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4869 if (HARD_REGNO_MODE_OK (regno, Pmode)
4870 && !fixed_regs[regno]
4871 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4872 && !REGNO_REG_SET_P
4873 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4874 && !refers_to_regno_p (regno,
4875 regno + hard_regno_nregs[regno]
4876 [Pmode],
4877 info.equiv_reg_src, NULL)
4878 && info.const_equiv[regno] == 0)
4879 break;
4881 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4883 reg = gen_rtx_REG (Pmode, regno);
4884 emit_move_insn (reg, retaddr);
4885 retaddr = reg;
4888 emit_equiv_load (&info);
4889 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4891 /* Show that the SET in the above insn is a RETURN. */
4892 jump_set = single_set (jump_insn);
4893 gcc_assert (jump_set);
4894 SET_IS_RETURN_P (jump_set) = 1;
4897 /* If SP is not mentioned in the pattern and its equivalent register, if
4898 any, is not modified, just emit it. Otherwise, if neither is set,
4899 replace the reference to SP and emit the insn. If none of those are
4900 true, handle each SET individually. */
4901 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4902 && (info.sp_equiv_reg == stack_pointer_rtx
4903 || !reg_set_p (info.sp_equiv_reg, insn)))
4904 add_insn (insn);
4905 else if (! reg_set_p (stack_pointer_rtx, insn)
4906 && (info.sp_equiv_reg == stack_pointer_rtx
4907 || !reg_set_p (info.sp_equiv_reg, insn)))
4909 int changed;
4911 changed = validate_replace_rtx (stack_pointer_rtx,
4912 plus_constant (info.sp_equiv_reg,
4913 info.sp_offset),
4914 insn);
4915 gcc_assert (changed);
4917 add_insn (insn);
4919 else if (GET_CODE (PATTERN (insn)) == SET)
4920 handle_epilogue_set (PATTERN (insn), &info);
4921 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4923 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4924 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4925 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4927 else
4928 add_insn (insn);
4930 info.sp_equiv_reg = info.new_sp_equiv_reg;
4931 info.sp_offset = info.new_sp_offset;
4933 /* Now update any constants this insn sets. */
4934 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4935 insn = next;
4938 insns = get_insns ();
4939 end_sequence ();
4940 return insns;
4943 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4944 structure that contains information about what we've seen so far. We
4945 process this SET by either updating that data or by emitting one or
4946 more insns. */
4948 static void
4949 handle_epilogue_set (rtx set, struct epi_info *p)
4951 /* First handle the case where we are setting SP. Record what it is being
4952 set from, which we must be able to determine. */
4953 if (reg_set_p (stack_pointer_rtx, set))
4955 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4957 if (GET_CODE (SET_SRC (set)) == PLUS)
4959 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4960 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4961 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4962 else
4964 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4965 && (REGNO (XEXP (SET_SRC (set), 1))
4966 < FIRST_PSEUDO_REGISTER)
4967 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4968 p->new_sp_offset
4969 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4972 else
4973 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4975 /* If we are adjusting SP, we adjust from the old data. */
4976 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4978 p->new_sp_equiv_reg = p->sp_equiv_reg;
4979 p->new_sp_offset += p->sp_offset;
4982 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4984 return;
4987 /* Next handle the case where we are setting SP's equivalent
4988 register. We must not already have a value to set it to. We
4989 could update, but there seems little point in handling that case.
4990 Note that we have to allow for the case where we are setting the
4991 register set in the previous part of a PARALLEL inside a single
4992 insn. But use the old offset for any updates within this insn.
4993 We must allow for the case where the register is being set in a
4994 different (usually wider) mode than Pmode. */
4995 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4997 gcc_assert (!p->equiv_reg_src
4998 && REG_P (p->new_sp_equiv_reg)
4999 && REG_P (SET_DEST (set))
5000 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5001 <= BITS_PER_WORD)
5002 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
5003 p->equiv_reg_src
5004 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5005 plus_constant (p->sp_equiv_reg,
5006 p->sp_offset));
5009 /* Otherwise, replace any references to SP in the insn to its new value
5010 and emit the insn. */
5011 else
5013 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5014 plus_constant (p->sp_equiv_reg,
5015 p->sp_offset));
5016 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5017 plus_constant (p->sp_equiv_reg,
5018 p->sp_offset));
5019 emit_insn (set);
5023 /* Update the tracking information for registers set to constants. */
5025 static void
5026 update_epilogue_consts (rtx dest, rtx x, void *data)
5028 struct epi_info *p = (struct epi_info *) data;
5029 rtx new;
5031 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5032 return;
5034 /* If we are either clobbering a register or doing a partial set,
5035 show we don't know the value. */
5036 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5037 p->const_equiv[REGNO (dest)] = 0;
5039 /* If we are setting it to a constant, record that constant. */
5040 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5041 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5043 /* If this is a binary operation between a register we have been tracking
5044 and a constant, see if we can compute a new constant value. */
5045 else if (ARITHMETIC_P (SET_SRC (x))
5046 && REG_P (XEXP (SET_SRC (x), 0))
5047 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5048 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5049 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5050 && 0 != (new = simplify_binary_operation
5051 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5052 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5053 XEXP (SET_SRC (x), 1)))
5054 && GET_CODE (new) == CONST_INT)
5055 p->const_equiv[REGNO (dest)] = new;
5057 /* Otherwise, we can't do anything with this value. */
5058 else
5059 p->const_equiv[REGNO (dest)] = 0;
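/* Illustrative example: after processing (set (reg 3) (const_int 8))
   the table records const_equiv[3] = 8; a subsequent
   (set (reg 3) (plus (reg 3) (const_int 4))) is folded by
   simplify_binary_operation and updates the entry to 12.  Any clobber
   or partial set of the register drops the entry again.  */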
5062 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5064 static void
5065 emit_equiv_load (struct epi_info *p)
5067 if (p->equiv_reg_src != 0)
5069 rtx dest = p->sp_equiv_reg;
5071 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5072 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5073 REGNO (p->sp_equiv_reg));
5075 emit_move_insn (dest, p->equiv_reg_src);
5076 p->equiv_reg_src = 0;
5079 #endif
5081 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5082 this into place with notes indicating where the prologue ends and where
5083 the epilogue begins. Update the basic block information when possible. */
5085 void
5086 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5088 int inserted = 0;
5089 edge e;
5090 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5091 rtx seq;
5092 #endif
5093 #ifdef HAVE_prologue
5094 rtx prologue_end = NULL_RTX;
5095 #endif
5096 #if defined (HAVE_epilogue) || defined(HAVE_return)
5097 rtx epilogue_end = NULL_RTX;
5098 #endif
5099 edge_iterator ei;
5101 #ifdef HAVE_prologue
5102 if (HAVE_prologue)
5104 start_sequence ();
5105 seq = gen_prologue ();
5106 emit_insn (seq);
5108 /* Retain a map of the prologue insns. */
5109 record_insns (seq, &prologue);
5110 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5112 seq = get_insns ();
5113 end_sequence ();
5114 set_insn_locators (seq, prologue_locator);
5116 /* Can't deal with multiple successors of the entry block
5117 at the moment. Function should always have at least one
5118 entry point. */
5119 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5121 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5122 inserted = 1;
5124 #endif
5126 /* If the exit block has no non-fake predecessors, we don't need
5127 an epilogue. */
5128 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5129 if ((e->flags & EDGE_FAKE) == 0)
5130 break;
5131 if (e == NULL)
5132 goto epilogue_done;
5134 #ifdef HAVE_return
5135 if (optimize && HAVE_return)
5137 /* If we're allowed to generate a simple return instruction,
5138 then by definition we don't need a full epilogue. Examine
5139 the block that falls through to EXIT. If it does not
5140 contain any code, examine its predecessors and try to
5141 emit (conditional) return instructions. */
5143 basic_block last;
5144 rtx label;
5146 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5147 if (e->flags & EDGE_FALLTHRU)
5148 break;
5149 if (e == NULL)
5150 goto epilogue_done;
5151 last = e->src;
5153 /* Verify that there are no active instructions in the last block. */
5154 label = BB_END (last);
5155 while (label && !LABEL_P (label))
5157 if (active_insn_p (label))
5158 break;
5159 label = PREV_INSN (label);
5162 if (BB_HEAD (last) == label && LABEL_P (label))
5164 edge_iterator ei2;
5165 rtx epilogue_line_note = NULL_RTX;
5167 /* Locate the line number associated with the closing brace,
5168 if we can find one. */
5169 for (seq = get_last_insn ();
5170 seq && ! active_insn_p (seq);
5171 seq = PREV_INSN (seq))
5172 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5174 epilogue_line_note = seq;
5175 break;
5178 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5180 basic_block bb = e->src;
5181 rtx jump;
5183 if (bb == ENTRY_BLOCK_PTR)
5185 ei_next (&ei2);
5186 continue;
5189 jump = BB_END (bb);
5190 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5192 ei_next (&ei2);
5193 continue;
5196 /* If we have an unconditional jump, we can replace that
5197 with a simple return instruction. */
5198 if (simplejump_p (jump))
5200 emit_return_into_block (bb, epilogue_line_note);
5201 delete_insn (jump);
5204 /* If we have a conditional jump, we can try to replace
5205 that with a conditional return instruction. */
5206 else if (condjump_p (jump))
5208 if (! redirect_jump (jump, 0, 0))
5210 ei_next (&ei2);
5211 continue;
5214 /* If this block has only one successor, it both jumps
5215 and falls through to the fallthru block, so we can't
5216 delete the edge. */
5217 if (single_succ_p (bb))
5219 ei_next (&ei2);
5220 continue;
5223 else
5225 ei_next (&ei2);
5226 continue;
5229 /* Fix up the CFG for the successful change we just made. */
5230 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5233 /* Emit a return insn for the exit fallthru block. Whether
5234 this is still reachable will be determined later. */
5236 emit_barrier_after (BB_END (last));
5237 emit_return_into_block (last, epilogue_line_note);
5238 epilogue_end = BB_END (last);
5239 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5240 goto epilogue_done;
5243 #endif
5244 /* Find the edge that falls through to EXIT. Other edges may exist
5245 due to RETURN instructions, but those don't need epilogues.
5246 There really shouldn't be a mixture -- either all should have
5247 been converted or none, however... */
5249 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5250 if (e->flags & EDGE_FALLTHRU)
5251 break;
5252 if (e == NULL)
5253 goto epilogue_done;
5255 #ifdef HAVE_epilogue
5256 if (HAVE_epilogue)
5258 start_sequence ();
5259 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5261 seq = gen_epilogue ();
5263 #ifdef INCOMING_RETURN_ADDR_RTX
5264 /* If this function returns with the stack depressed and we can support
5265 it, massage the epilogue to actually do that. */
5266 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5267 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5268 seq = keep_stack_depressed (seq);
5269 #endif
5271 emit_jump_insn (seq);
5273 /* Retain a map of the epilogue insns. */
5274 record_insns (seq, &epilogue);
5275 set_insn_locators (seq, epilogue_locator);
5277 seq = get_insns ();
5278 end_sequence ();
5280 insert_insn_on_edge (seq, e);
5281 inserted = 1;
5283 else
5284 #endif
5286 basic_block cur_bb;
5288 if (! next_active_insn (BB_END (e->src)))
5289 goto epilogue_done;
5290 /* We have a fall-through edge to the exit block, the source is not
5291 at the end of the function, and there will be an assembler epilogue
5292 at the end of the function.
5293 We can't use force_nonfallthru here, because that would try to
5294 use return. Inserting a jump 'by hand' is extremely messy, so
5295 we take advantage of cfg_layout_finalize using
5296 fixup_fallthru_exit_predecessor. */
5297 cfg_layout_initialize (0);
5298 FOR_EACH_BB (cur_bb)
5299 if (cur_bb->index >= NUM_FIXED_BLOCKS
5300 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5301 cur_bb->aux = cur_bb->next_bb;
5302 cfg_layout_finalize ();
5304 epilogue_done:
5306 if (inserted)
5307 commit_edge_insertions ();
5309 #ifdef HAVE_sibcall_epilogue
5310 /* Emit sibling epilogues before any sibling call sites. */
5311 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5313 basic_block bb = e->src;
5314 rtx insn = BB_END (bb);
5316 if (!CALL_P (insn)
5317 || ! SIBLING_CALL_P (insn))
5319 ei_next (&ei);
5320 continue;
5323 start_sequence ();
5324 emit_insn (gen_sibcall_epilogue ());
5325 seq = get_insns ();
5326 end_sequence ();
5328 /* Retain a map of the epilogue insns. Used in life analysis to
5329 avoid getting rid of sibcall epilogue insns. Do this before we
5330 actually emit the sequence. */
5331 record_insns (seq, &sibcall_epilogue);
5332 set_insn_locators (seq, epilogue_locator);
5334 emit_insn_before (seq, insn);
5335 ei_next (&ei);
5337 #endif
5339 #ifdef HAVE_prologue
5340 /* This is probably all useless now that we use locators. */
5341 if (prologue_end)
5343 rtx insn, prev;
5345 /* GDB handles `break f' by setting a breakpoint on the first
5346 line note after the prologue. Which means (1) that if
5347 there are line number notes before where we inserted the
5348 prologue we should move them, and (2) we should generate a
5349 note before the end of the first basic block, if there isn't
5350 one already there.
5352 ??? This behavior is completely broken when dealing with
5353 multiple entry functions. We simply place the note always
5354 into the first basic block and let alternate entry points
5355 be missed.
5358 for (insn = prologue_end; insn; insn = prev)
5360 prev = PREV_INSN (insn);
5361 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5363 /* Note that we cannot reorder the first insn in the
5364 chain, since rest_of_compilation relies on that
5365 remaining constant. */
5366 if (prev == NULL)
5367 break;
5368 reorder_insns (insn, insn, prologue_end);
5372 /* Find the last line number note in the first block. */
5373 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5374 insn != prologue_end && insn;
5375 insn = PREV_INSN (insn))
5376 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5377 break;
5379 /* If we didn't find one, make a copy of the first line number
5380 we run across. */
5381 if (! insn)
5383 for (insn = next_active_insn (prologue_end);
5384 insn;
5385 insn = PREV_INSN (insn))
5386 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5388 emit_note_copy_after (insn, prologue_end);
5389 break;
5393 #endif
5394 #ifdef HAVE_epilogue
5395 if (epilogue_end)
5397 rtx insn, next;
5399 /* Similarly, move any line notes that appear after the epilogue.
5400 There is no need, however, to be quite so anal about the existence
5401 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5402 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5403 info generation. */
5404 for (insn = epilogue_end; insn; insn = next)
5406 next = NEXT_INSN (insn);
5407 if (NOTE_P (insn)
5408 && (NOTE_LINE_NUMBER (insn) > 0
5409 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5410 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5411 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5414 #endif
5417 /* Reposition the prologue-end and epilogue-begin notes after instruction
5418 scheduling and delayed branch scheduling. */
5420 void
5421 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5423 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5424 rtx insn, last, note;
5425 int len;
5427 if ((len = VEC_length (int, prologue)) > 0)
5429 last = 0, note = 0;
5431 /* Scan from the beginning until we reach the last prologue insn.
5432 We apparently can't depend on basic_block_{head,end} after
5433 reorg has run. */
5434 for (insn = f; insn; insn = NEXT_INSN (insn))
5436 if (NOTE_P (insn))
5438 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5439 note = insn;
5441 else if (contains (insn, &prologue))
5443 last = insn;
5444 if (--len == 0)
5445 break;
5449 if (last)
5451 /* Find the prologue-end note if we haven't already, and
5452 move it to just after the last prologue insn. */
5453 if (note == 0)
5455 for (note = last; (note = NEXT_INSN (note));)
5456 if (NOTE_P (note)
5457 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5458 break;
5461 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5462 if (LABEL_P (last))
5463 last = NEXT_INSN (last);
5464 reorder_insns (note, note, last);
5468 if ((len = VEC_length (int, epilogue)) > 0)
5470 last = 0, note = 0;
5472 /* Scan from the end until we reach the first epilogue insn.
5473 We apparently can't depend on basic_block_{head,end} after
5474 reorg has run. */
5475 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5477 if (NOTE_P (insn))
5479 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5480 note = insn;
5482 else if (contains (insn, &epilogue))
5484 last = insn;
5485 if (--len == 0)
5486 break;
5490 if (last)
5492 /* Find the epilogue-begin note if we haven't already, and
5493 move it to just before the first epilogue insn. */
5494 if (note == 0)
5496 for (note = insn; (note = PREV_INSN (note));)
5497 if (NOTE_P (note)
5498 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5499 break;
5502 if (PREV_INSN (last) != note)
5503 reorder_insns (note, note, PREV_INSN (last));
5506 #endif /* HAVE_prologue or HAVE_epilogue */
5509 /* Resets the ib_boundaries_block array. */
5511 void
5512 reset_block_changes (void)
5514 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5515 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5518 /* Record the boundary for BLOCK. */
5519 void
5520 record_block_change (tree block)
5522 int i, n;
5523 tree last_block;
5525 if (!block)
5526 return;
5528 if (!cfun->ib_boundaries_block)
5529 return;
5531 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5532 VARRAY_POP (cfun->ib_boundaries_block);
5533 n = get_max_uid ();
5534 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5535 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5537 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
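/* Usage note: the varray built above maps insn UIDs to blocks.  Each
   call to record_block_change first attributes every UID created since
   the previous call to the previously recorded block, then pushes the
   new BLOCK; check_block_change later indexes the varray directly by
   INSN_UID to recover the block an insn belongs to.  */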
5540 /* Finishes record of boundaries. */
5541 void finalize_block_changes (void)
5543 record_block_change (DECL_INITIAL (current_function_decl));
5546 /* For INSN return the BLOCK it belongs to. */
5547 void
5548 check_block_change (rtx insn, tree *block)
5550 unsigned uid = INSN_UID (insn);
5552 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5553 return;
5555 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5558 /* Releases the ib_boundaries_block records. */
5559 void
5560 free_block_changes (void)
5562 cfun->ib_boundaries_block = NULL;
5565 /* Returns the name of the current function. */
5566 const char *
5567 current_function_name (void)
5569 return lang_hooks.decl_printable_name (cfun->decl, 2);
5573 static unsigned int
5574 rest_of_handle_check_leaf_regs (void)
5576 #ifdef LEAF_REGISTERS
5577 current_function_uses_only_leaf_regs
5578 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5579 #endif
5580 return 0;
5583 struct tree_opt_pass pass_leaf_regs =
5585 NULL, /* name */
5586 NULL, /* gate */
5587 rest_of_handle_check_leaf_regs, /* execute */
5588 NULL, /* sub */
5589 NULL, /* next */
5590 0, /* static_pass_number */
5591 0, /* tv_id */
5592 0, /* properties_required */
5593 0, /* properties_provided */
5594 0, /* properties_destroyed */
5595 0, /* todo_flags_start */
5596 0, /* todo_flags_finish */
5597 0 /* letter */
5601 #include "gt-function.h"