gcc/function.c
/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases, use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
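
/* Worked examples of the two macros, with an 8-byte alignment:

     CEIL_ROUND (13, 8)   == 16    ((13 + 7) & ~7)
     FLOOR_ROUND (-13, 8) == -16   (-13 & ~7, in two's complement)

   so both remain correct for the negative frame offsets produced when
   the frame grows downward.  */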
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
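
/* An illustrative (hypothetical) use of the nesting discipline:

     push_temp_slots ();                        (enter a new level)
     tmp = assign_stack_temp (DImode, 8, 0);    (slot lives at this level)
     ... emit code using tmp ...
     pop_temp_slots ();                         (the slot is freed here)

   preserve_temp_slots instead moves a slot up one level so that it
   survives the pop.  */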
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void instantiate_decls (tree, int);
static void instantiate_decls_1 (tree, int);
static void instantiate_decl (rtx, HOST_WIDE_INT, int);
static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
static int instantiate_virtual_regs_1 (rtx *, rtx, int);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, varray_type *);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
static int contains (rtx, varray_type);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void instantiate_virtual_regs_lossage (rtx);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context)
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */
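
/* As an illustration, a hypothetical caller wanting a word-sized slot
   with the default mode-derived alignment would write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   whereas passing ALIGN == -1 would instead force BIGGEST_ALIGNMENT and
   round the size up to a multiple of it.  */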
static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;
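
  /* For example, on a hypothetical port with STARTING_FRAME_OFFSET == 4
     and a 16-byte PREFERRED_STACK_BOUNDARY, frame_off is 4 and
     frame_phase is 12, and the rounding below is done relative to that
     phase rather than to the raw frame offset.  */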
  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                        (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                       (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  level++;

  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused and will abort.

   TYPE is the type that will be used for the stack slot.  */
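
/* A typical call (hypothetical), allocating a reusable BLKmode slot for
   an aggregate TYPE:

     rtx t = assign_stack_temp_for_type (BLKmode,
                                         int_size_in_bytes (type), 0, type);

   where KEEP == 0 lets free_temp_slots reclaim the slot at the end of
   the statement.  */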
rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = avail_temp_slots; p; p = p->next)
    {
      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
          && objects_must_conflict_p (p->type, type)
          && (best_p == 0 || best_p->size > p->size
              || (best_p->size == p->size && best_p->align > p->align)))
        {
          if (p->align == align && p->size == size)
            {
              selected = p;
              cut_slot_from_list (selected, &avail_temp_slots);
              best_p = 0;
              break;
            }
          best_p = p;
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size,
                                                   (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate space of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */
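
/* For instance (hypothetical), forcing an addressable scratch for a
   value that must live in memory:

     rtx mem = assign_temp (type, 0, 1, 0);

   With MEMORY_REQUIRED == 0 and a scalar type, the call would normally
   yield a pseudo register via gen_reg_rtx instead.  */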
rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      tree size_tree;
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* If we still haven't been able to get a size, see if the language
         can compute a maximum size.  */
      if (size == -1
          && (size_tree = lang_hooks.types.max_size (type)) != 0
          && host_integerp (size_tree, 1))
        size = tree_low_cst (size_tree, 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("%Jsize of variable '%D' is too large", decl, decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
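
/* For example, two freed BLKmode slots with (base_offset, full_size)
   of (0, 16) and (16, 16) are adjacent, so the loop below merges them
   into one slot of (0, 32), which a later, larger request can reuse.  */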
void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
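
/* For example, with the GNU statement-expression extension
   (hypothetical source):

     s = ({ struct big b = make_big (); b; });

   the value of the grouping may sit in a temp slot created inside it,
   so that slot must be moved up a level to survive the inner pop.  */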
void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     address was taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;
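
/* As a reading aid, the virtual-to-hard mapping applied below is:

     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx            -> arg_pointer_rtx   + cfa_offset

   (see instantiate_new_reg).  */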
/* On most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather is part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)                                          \
((ACCUMULATE_OUTGOING_ARGS                                                    \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL))     \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)                                          \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)         \
 + (STACK_POINTER_OFFSET))
#endif
#endif

/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (current_function_decl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      {
        instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
        if (INSN_DELETED_P (insn))
          continue;
        instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
                                      NULL_RTX, 0);

        /* Past this point all ASM statements should match.  Verify that
           to avoid failures later in the compilation process.  */
        if (asm_noperands (PATTERN (insn)) >= 0
            && ! check_asm_operands (PATTERN (insn)))
          instantiate_virtual_regs_lossage (insn);
      }

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (current_function_decl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (tree fndecl, int valid_only)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
      HOST_WIDE_INT size_rtl;

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
         larger than the declared type size.  We must use the larger of
         the two sizes.  */
      size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
      size = MAX (size_rtl, size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let, int valid_only)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      instantiate_decl (DECL_RTL (t),
                        int_size_in_bytes (TREE_TYPE (t)),
                        valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is nonzero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (x == 0 || !MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */
  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only && size >= 0)
    {
      unsigned HOST_WIDE_INT decl_size = size;

      /* Now verify that the resulting address is valid for every integer or
         floating-point mode up to and including SIZE bytes long.  We do this
         since the object might be accessed in any mode and frame addresses
         are shared.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */
  XEXP (x, 0) = addr;
}
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    new = arg_pointer_rtx, offset = cfa_offset;
  else
    return 0;

  *poffset = offset;
  return new;
}
/* Called when instantiate_virtual_regs has failed to update the instruction.
   Usually this means that a non-matching instruction has been emitted;
   however, for asm statements the problem may lie in the constraints.  */

static void
instantiate_virtual_regs_lossage (rtx insn)
{
  gcc_assert (asm_noperands (PATTERN (insn)) >= 0);
  error_for_asm (insn, "impossible constraint in `asm'");
  delete_insn (insn);
}
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if
   the replacement is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */
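
/* A typical rewrite performed below (hypothetical RTL, assuming
   var_offset == 16):

     (plus (reg virtual_stack_vars) (const_int 8))

   becomes

     (plus (reg frame_pointer) (const_int 24))

   folding the virtual register's offset into the existing constant.  */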
static int
instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;
  HOST_WIDE_INT offset = 0;
  rtx temp;
  rtx seq;
  int i, j;
  const char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  /* We may have detected and deleted invalid asm statements.  */
  if (object && INSN_P (object) && INSN_DELETED_P (object))
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
         the actual register should receive the source minus the
         appropriate offset.  This is used, for example, in the handling
         of non-local gotos.  */
      if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
        {
          rtx src = SET_SRC (x);

          /* We are setting the register, not using it, so the relevant
             offset is the negative of the offset to use were we using
             the register.  */
          offset = - offset;
          instantiate_virtual_regs_1 (&src, NULL_RTX, 0);

          /* The only valid sources here are PLUS or REG.  Just do
             the simplest possible thing to handle them.  */
          if (!REG_P (src) && GET_CODE (src) != PLUS)
            {
              instantiate_virtual_regs_lossage (object);
              return 1;
            }

          start_sequence ();
          if (!REG_P (src))
            temp = force_operand (src, NULL_RTX);
          else
            temp = src;
          temp = force_operand (plus_constant (temp, offset), NULL_RTX);
          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, object);
          SET_DEST (x) = new;

          if (! validate_change (object, &SET_SRC (x), temp, 0)
              || ! extra_insns)
            instantiate_virtual_regs_lossage (object);

          return 1;
        }

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
        {
          rtx old, new_offset;

          /* Check for (plus (plus VIRT foo) (const_int)) first.  */
          if (GET_CODE (XEXP (x, 0)) == PLUS)
            {
              if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
                {
                  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
                                              extra_insns);
                  new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
                }
              else
                {
                  loc = &XEXP (x, 0);
                  goto restart;
                }
            }

#ifdef POINTERS_EXTEND_UNSIGNED
          /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
             we can commute the PLUS and SUBREG because pointers into the
             frame are well-behaved.  */
          else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && 0 != (new
                            = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
                                                   &offset))
                   && validate_change (object, loc,
                                       plus_constant (gen_lowpart (ptr_mode,
                                                                   new),
                                                      offset
                                                      + INTVAL (XEXP (x, 1))),
                                       0))
            return 1;
#endif
          else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
            {
              /* We know the second operand is a constant.  Unless the
                 first operand is a REG (which has been already checked),
                 it needs to be checked.  */
              if (!REG_P (XEXP (x, 0)))
                {
                  loc = &XEXP (x, 0);
                  goto restart;
                }
              return 1;
            }

          new_offset = plus_constant (XEXP (x, 1), offset);

          /* If the new constant is zero, try to replace the sum with just
             the register.  */
          if (new_offset == const0_rtx
              && validate_change (object, loc, new, 0))
            return 1;

          /* Next try to replace the register and new offset.
             There are two changes to validate here and we can't assume that
             in the case of old offset equals new just changing the register
             will yield a valid insn.  In the interests of a little efficiency,
             however, we only call validate change once (we don't queue up the
             changes and then call apply_change_group).  */

          old = XEXP (x, 0);
          if (offset == 0
              ? ! validate_change (object, &XEXP (x, 0), new, 0)
              : (XEXP (x, 0) = new,
                 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
            {
              if (! extra_insns)
                {
                  XEXP (x, 0) = old;
                  return 0;
                }

              /* Otherwise copy the new constant into a register and replace
                 constant with that register.  */
              temp = gen_reg_rtx (Pmode);
              XEXP (x, 0) = new;
              if (validate_change (object, &XEXP (x, 1), temp, 0))
                emit_insn_before (gen_move_insn (temp, new_offset), object);
              else
                {
                  /* If that didn't work, replace this expression with a
                     register containing the sum.  */

                  XEXP (x, 0) = old;
                  new = gen_rtx_PLUS (Pmode, new, new_offset);

                  start_sequence ();
                  temp = force_operand (new, NULL_RTX);
                  seq = get_insns ();
                  end_sequence ();

                  emit_insn_before (seq, object);
                  if (! validate_change (object, loc, temp, 0)
                      && ! validate_replace_rtx (x, temp, object))
                    {
                      instantiate_virtual_regs_lossage (object);
                      return 1;
                    }
                }
            }

          return 1;
        }

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV: case UDIV:
    case MOD: case UMOD:
    case AND: case IOR: case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE: case EQ:
    case GE: case GT: case GEU: case GTU:
    case LE: case LT: case LEU: case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
        instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
         handled by our scan of decls.  The only special handling we
         need here is to make a copy of the rtx to ensure it isn't being
         shared if we have to change it to a pseudo.

         If the rtx is a simple reference to an address via a virtual register,
         it can potentially be shared.  In such cases, first try to make it
         a valid address, which can also be shared.  Otherwise, copy it and
         proceed normally.

         First check for common cases that need no processing.  These are
         usually due to instantiation already being done on a previous instance
         of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          || temp == arg_pointer_rtx
#endif
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          || temp == hard_frame_pointer_rtx
#endif
          || temp == frame_pointer_rtx)
        return 1;

      if (GET_CODE (temp) == PLUS
          && CONSTANT_ADDRESS_P (XEXP (temp, 1))
          && (XEXP (temp, 0) == frame_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
              || XEXP (temp, 0) == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || XEXP (temp, 0) == arg_pointer_rtx
#endif
              ))
        return 1;

      if (temp == virtual_stack_vars_rtx
          || temp == virtual_incoming_args_rtx
          || (GET_CODE (temp) == PLUS
              && CONSTANT_ADDRESS_P (XEXP (temp, 1))
              && (XEXP (temp, 0) == virtual_stack_vars_rtx
                  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
        {
          /* This MEM may be shared.  If the substitution can be done without
             the need to generate new pseudos, we want to do it in place
             so all copies of the shared rtx benefit.  The call below will
             only make substitutions if the resulting address is still
             valid.

             Note that we cannot pass X as the object in the recursive call
             since the insn being processed may not allow all valid
             addresses.  However, if we were not passed on object, we can
             only modify X without copying it if X will have a valid
             address.

             ??? Also note that this can still lose if OBJECT is an insn that
             has fewer restrictions on an address than some other insn.
             In that case, we will modify the shared address.  This case
             doesn't seem very likely, though.  One case where this could
             happen is in the case of a USE or CLOBBER reference, but we
             take care of that below.  */

          if (instantiate_virtual_regs_1 (&XEXP (x, 0),
                                          object ? object : x, 0))
            return 1;

          /* Otherwise make a copy and process that copy.  We copy the entire
             RTL expression since it might be a PLUS which could also be
             shared.  */
          *loc = x = copy_rtx (x);
        }

      /* Fall through to generic unary operation case.  */
    case PREFETCH:
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG: case NOT:
    case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
    case SIGN_EXTEND: case ZERO_EXTEND:
    case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT: case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ: case CTZ:
    case POPCOUNT: case PARITY:
      /* These cases either have just one operand or we know that we need not
         check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case USE:
    case CLOBBER:
      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
         go ahead and make the invalid one, but do it to a copy.  For a REG,
         just make the recursive call, since there's no chance of a problem.  */

      if ((MEM_P (XEXP (x, 0))
           && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
                                          0))
          || (REG_P (XEXP (x, 0))
              && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
        return 1;

      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
         in front of this insn and substitute the temporary.  */
      if ((new = instantiate_new_reg (x, &offset)) != 0)
        {
          temp = plus_constant (new, offset);
          if (!validate_change (object, loc, temp, 0))
            {
              if (! extra_insns)
                return 0;

              start_sequence ();
              temp = force_operand (temp, NULL_RTX);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, object);
              if (! validate_change (object, loc, temp, 0)
                  && ! validate_replace_rtx (x, temp, object))
                instantiate_virtual_regs_lossage (object);
            }
        }

      return 1;

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
        if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
          return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
                                          extra_insns))
          return 0;

  return 1;
}
1827 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1828 This means a type for which function calls must pass an address to the
1829 function or get an address back from the function.
1830 EXP may be a type node or an expression (whose type is tested). */
1833 aggregate_value_p (tree exp, tree fntype)
1835 int i, regno, nregs;
1836 rtx reg;
1838 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1840 if (fntype)
1841 switch (TREE_CODE (fntype))
1843 case CALL_EXPR:
1844 fntype = get_callee_fndecl (fntype);
1845 fntype = fntype ? TREE_TYPE (fntype) : 0;
1846 break;
1847 case FUNCTION_DECL:
1848 fntype = TREE_TYPE (fntype);
1849 break;
1850 case FUNCTION_TYPE:
1851 case METHOD_TYPE:
1852 break;
1853 case IDENTIFIER_NODE:
1854 fntype = 0;
1855 break;
1856 default:
1857 /* We don't expect other tree codes here. */
1858 gcc_unreachable ();
1861 if (TREE_CODE (type) == VOID_TYPE)
1862 return 0;
1863 /* If the front end has decided that this needs to be passed by
1864 reference, do so. */
1865 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1866 && DECL_BY_REFERENCE (exp))
1867 return 1;
1868 if (targetm.calls.return_in_memory (type, fntype))
1869 return 1;
1870 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1871 and thus can't be returned in registers. */
1872 if (TREE_ADDRESSABLE (type))
1873 return 1;
1874 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1875 return 1;
1876 /* Make sure we have suitable call-clobbered regs to return
1877 the value in; if not, we must return it in memory. */
1878 reg = hard_function_value (type, 0, 0);
1880 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1881 it is OK. */
1882 if (!REG_P (reg))
1883 return 0;
1885 regno = REGNO (reg);
1886 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1887 for (i = 0; i < nregs; i++)
1888 if (! call_used_regs[regno + i])
1889 return 1;
1890 return 0;
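/* Hedged illustration -- a minimal sketch, not part of function.c: when
   aggregate_value_p is nonzero for a function's result, the caller passes
   the address of a return slot instead of receiving the value in
   registers.  The lowered signature below is hypothetical; targets differ
   in how the hidden pointer is passed.  */
#if 0 /* Illustrative sketch only.  */
struct big { int v[8]; };        /* too large for the return registers */

struct big make_big (void);      /* source-level declaration */

/* Effective convention once aggregate_value_p holds: the caller
   allocates 'struct big' storage and passes its address, much as
   assign_parms_augmented_arg_list prepends a hidden pointer arg.  */
void make_big_lowered (struct big *retval);
#endif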
1893 /* Return true if we should assign DECL a pseudo register; false if it
1894 should live on the local stack. */
1896 bool
1897 use_register_for_decl (tree decl)
1899 /* Honor volatile. */
1900 if (TREE_SIDE_EFFECTS (decl))
1901 return false;
1903 /* Honor addressability. */
1904 if (TREE_ADDRESSABLE (decl))
1905 return false;
1907 /* Only register-like things go in registers. */
1908 if (DECL_MODE (decl) == BLKmode)
1909 return false;
1911 /* If -ffloat-store specified, don't put explicit float variables
1912 into registers. */
1913 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1914 propagates values across these stores, and it probably shouldn't. */
1915 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1916 return false;
1918 /* Compiler-generated temporaries can always go in registers. */
1919 if (DECL_ARTIFICIAL (decl))
1920 return true;
1922 #ifdef NON_SAVING_SETJMP
1923 /* Protect variables not declared "register" from setjmp. */
1924 if (NON_SAVING_SETJMP
1925 && current_function_calls_setjmp
1926 && !DECL_REGISTER (decl))
1927 return false;
1928 #endif
1930 return (optimize || DECL_REGISTER (decl));
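/* Hedged illustration -- a sketch of decls the tests above accept or
   reject; the outcomes assume a typical target and optimization on.  */
#if 0 /* Illustrative sketch only.  */
void
example (void)
{
  volatile int a = 0;            /* TREE_SIDE_EFFECTS: stays on the stack */
  int b = 0;                     /* TREE_ADDRESSABLE via &b: stays on the stack */
  int *p = &b;
  struct { char c[37]; } s;      /* BLKmode: stays on the stack */
  int i = 0;                     /* none of the above: gets a pseudo register */
  (void) a; (void) p; (void) s; (void) i;
}
#endif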
1933 /* Return true if TYPE should be passed by invisible reference. */
1935 bool
1936 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1937 tree type, bool named_arg)
1939 if (type)
1941 /* If this type contains non-trivial constructors, then it is
1942 forbidden for the middle-end to create any new copies. */
1943 if (TREE_ADDRESSABLE (type))
1944 return true;
1946 /* GCC post 3.4 passes *all* variable-sized types by reference. */
1947 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1948 return true;
1951 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1954 /* Return true if TYPE, which is passed by reference, should be callee
1955 copied instead of caller copied. */
1957 bool
1958 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1959 tree type, bool named_arg)
1961 if (type && TREE_ADDRESSABLE (type))
1962 return false;
1963 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1966 /* Structures to communicate between the subroutines of assign_parms.
1967 The first holds data persistent across all parameters, the second
1968 is cleared out for each parameter. */
1970 struct assign_parm_data_all
1972 CUMULATIVE_ARGS args_so_far;
1973 struct args_size stack_args_size;
1974 tree function_result_decl;
1975 tree orig_fnargs;
1976 rtx conversion_insns;
1977 HOST_WIDE_INT pretend_args_size;
1978 HOST_WIDE_INT extra_pretend_bytes;
1979 int reg_parm_stack_space;
1982 struct assign_parm_data_one
1984 tree nominal_type;
1985 tree passed_type;
1986 rtx entry_parm;
1987 rtx stack_parm;
1988 enum machine_mode nominal_mode;
1989 enum machine_mode passed_mode;
1990 enum machine_mode promoted_mode;
1991 struct locate_and_pad_arg_data locate;
1992 int partial;
1993 BOOL_BITFIELD named_arg : 1;
1994 BOOL_BITFIELD last_named : 1;
1995 BOOL_BITFIELD passed_pointer : 1;
1996 BOOL_BITFIELD on_stack : 1;
1997 BOOL_BITFIELD loaded_in_reg : 1;
2000 /* A subroutine of assign_parms. Initialize ALL. */
2002 static void
2003 assign_parms_initialize_all (struct assign_parm_data_all *all)
2005 tree fntype;
2007 memset (all, 0, sizeof (*all));
2009 fntype = TREE_TYPE (current_function_decl);
2011 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2012 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2013 #else
2014 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2015 current_function_decl, -1);
2016 #endif
2018 #ifdef REG_PARM_STACK_SPACE
2019 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2020 #endif
2023 /* If ARGS contains entries with complex types, split the entry into two
2024 entries of the component type. Return a new list if substitutions are
2025 needed, else the old list. */
2027 static tree
2028 split_complex_args (tree args)
2030 tree p;
2032 /* Before allocating memory, check for the common case of no complex. */
2033 for (p = args; p; p = TREE_CHAIN (p))
2035 tree type = TREE_TYPE (p);
2036 if (TREE_CODE (type) == COMPLEX_TYPE
2037 && targetm.calls.split_complex_arg (type))
2038 goto found;
2040 return args;
2042 found:
2043 args = copy_list (args);
2045 for (p = args; p; p = TREE_CHAIN (p))
2047 tree type = TREE_TYPE (p);
2048 if (TREE_CODE (type) == COMPLEX_TYPE
2049 && targetm.calls.split_complex_arg (type))
2051 tree decl;
2052 tree subtype = TREE_TYPE (type);
2054 /* Rewrite the PARM_DECL's type with its component. */
2055 TREE_TYPE (p) = subtype;
2056 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2057 DECL_MODE (p) = VOIDmode;
2058 DECL_SIZE (p) = NULL;
2059 DECL_SIZE_UNIT (p) = NULL;
2060 layout_decl (p, 0);
2062 /* Build a second synthetic decl. */
2063 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2064 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2065 layout_decl (decl, 0);
2067 /* Splice it in; skip the new decl. */
2068 TREE_CHAIN (decl) = TREE_CHAIN (p);
2069 TREE_CHAIN (p) = decl;
2070 p = decl;
2074 return args;
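/* Hedged illustration -- how a split complex parameter behaves on a
   target whose split_complex_arg hook returns true; the second prototype
   is hypothetical, showing the effective PARM_DECL chain.  */
#if 0 /* Illustrative sketch only.  */
double cabs2 (_Complex double z);

/* After split_complex_args, the chain acts as if declared:  */
double cabs2_as_split (double z_real, double z_imag);
#endif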
2077 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2078 the hidden struct return argument, and (abi willing) complex args.
2079 Return the new parameter list. */
2081 static tree
2082 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2084 tree fndecl = current_function_decl;
2085 tree fntype = TREE_TYPE (fndecl);
2086 tree fnargs = DECL_ARGUMENTS (fndecl);
2088 /* If struct value address is treated as the first argument, make it so. */
2089 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2090 && ! current_function_returns_pcc_struct
2091 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2093 tree type = build_pointer_type (TREE_TYPE (fntype));
2094 tree decl;
2096 decl = build_decl (PARM_DECL, NULL_TREE, type);
2097 DECL_ARG_TYPE (decl) = type;
2098 DECL_ARTIFICIAL (decl) = 1;
2100 TREE_CHAIN (decl) = fnargs;
2101 fnargs = decl;
2102 all->function_result_decl = decl;
2105 all->orig_fnargs = fnargs;
2107 /* If the target wants to split complex arguments into scalars, do so. */
2108 if (targetm.calls.split_complex_arg)
2109 fnargs = split_complex_args (fnargs);
2111 return fnargs;
2114 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2115 data for the parameter. Incorporate ABI specifics such as pass-by-
2116 reference and type promotion. */
2118 static void
2119 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2120 struct assign_parm_data_one *data)
2122 tree nominal_type, passed_type;
2123 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2125 memset (data, 0, sizeof (*data));
2127 /* Set LAST_NAMED if this is the last named arg before the anonymous args. */
2128 if (current_function_stdarg)
2130 tree tem;
2131 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
2132 if (DECL_NAME (tem))
2133 break;
2134 if (tem == 0)
2135 data->last_named = true;
2138 /* Set NAMED_ARG if this arg should be treated as a named arg. For
2139 most machines, if this is a varargs/stdarg function, then we treat
2140 the last named arg as if it were anonymous too. */
2141 if (targetm.calls.strict_argument_naming (&all->args_so_far))
2142 data->named_arg = 1;
2143 else
2144 data->named_arg = !data->last_named;
2146 nominal_type = TREE_TYPE (parm);
2147 passed_type = DECL_ARG_TYPE (parm);
2149 /* Look out for errors propagating this far. Also, if the parameter's
2150 type is void then its value doesn't matter. */
2151 if (TREE_TYPE (parm) == error_mark_node
2152 /* This can happen after weird syntax errors
2153 or if an enum type is defined among the parms. */
2154 || TREE_CODE (parm) != PARM_DECL
2155 || passed_type == NULL
2156 || VOID_TYPE_P (nominal_type))
2158 nominal_type = passed_type = void_type_node;
2159 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2160 goto egress;
2163 /* Find mode of arg as it is passed, and mode of arg as it should be
2164 during execution of this function. */
2165 passed_mode = TYPE_MODE (passed_type);
2166 nominal_mode = TYPE_MODE (nominal_type);
2168 /* If the parm is to be passed as a transparent union, use the type of
2169 the first field for the tests below. We have already verified that
2170 the modes are the same. */
2171 if (DECL_TRANSPARENT_UNION (parm)
2172 || (TREE_CODE (passed_type) == UNION_TYPE
2173 && TYPE_TRANSPARENT_UNION (passed_type)))
2174 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2176 /* See if this arg was passed by invisible reference. */
2177 if (pass_by_reference (&all->args_so_far, passed_mode,
2178 passed_type, data->named_arg))
2180 passed_type = nominal_type = build_pointer_type (passed_type);
2181 data->passed_pointer = true;
2182 passed_mode = nominal_mode = Pmode;
2185 /* Find mode as it is passed by the ABI. */
2186 promoted_mode = passed_mode;
2187 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2189 int unsignedp = TYPE_UNSIGNED (passed_type);
2190 promoted_mode = promote_mode (passed_type, promoted_mode,
2191 &unsignedp, 1);
2194 egress:
2195 data->nominal_type = nominal_type;
2196 data->passed_type = passed_type;
2197 data->nominal_mode = nominal_mode;
2198 data->passed_mode = passed_mode;
2199 data->promoted_mode = promoted_mode;
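/* Hedged illustration -- plausible mode assignments on a hypothetical
   32-bit target; the exact modes depend on the front end and ABI hooks.  */
#if 0 /* Illustrative sketch only.  */
struct big { int v[8]; };

short f (short x);
/* Typically: nominal_mode = HImode (the parm's type within the function);
   passed_mode and promoted_mode widen to SImode once the front end and
   promote_function_args promote sub-word scalars.  */

struct big g (struct big s);
/* If pass_by_reference is true for 'struct big', passed_type becomes
   'struct big *', passed_pointer = 1, and both passed_mode and
   nominal_mode collapse to Pmode.  */
#endif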
2202 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2204 static void
2205 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2206 struct assign_parm_data_one *data, bool no_rtl)
2208 int varargs_pretend_bytes = 0;
2210 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2211 data->promoted_mode,
2212 data->passed_type,
2213 &varargs_pretend_bytes, no_rtl);
2215 /* If the back-end has requested extra stack space, record how much is
2216 needed. Do not change pretend_args_size otherwise since it may be
2217 nonzero from an earlier partial argument. */
2218 if (varargs_pretend_bytes > 0)
2219 all->pretend_args_size = varargs_pretend_bytes;
2222 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2223 the incoming location of the current parameter. */
2225 static void
2226 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2227 struct assign_parm_data_one *data)
2229 HOST_WIDE_INT pretend_bytes = 0;
2230 rtx entry_parm;
2231 bool in_regs;
2233 if (data->promoted_mode == VOIDmode)
2235 data->entry_parm = data->stack_parm = const0_rtx;
2236 return;
2239 #ifdef FUNCTION_INCOMING_ARG
2240 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2241 data->passed_type, data->named_arg);
2242 #else
2243 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2244 data->passed_type, data->named_arg);
2245 #endif
2247 if (entry_parm == 0)
2248 data->promoted_mode = data->passed_mode;
2250 /* Determine parm's home in the stack, in case it arrives in the stack
2251 or we should pretend it did. Compute the stack position and rtx where
2252 the argument arrives and its size.
2254 There is one complexity here: If this was a parameter that would
2255 have been passed in registers, but wasn't only because it is
2256 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2257 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2258 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2259 as it was the previous time. */
2260 in_regs = entry_parm != 0;
2261 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2262 in_regs = true;
2263 #endif
2264 if (!in_regs && !data->named_arg)
2266 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2268 rtx tem;
2269 #ifdef FUNCTION_INCOMING_ARG
2270 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2271 data->passed_type, true);
2272 #else
2273 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2274 data->passed_type, true);
2275 #endif
2276 in_regs = tem != NULL;
2280 /* If this parameter was passed both in registers and in the stack, use
2281 the copy on the stack. */
2282 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2283 data->passed_type))
2284 entry_parm = 0;
2286 if (entry_parm)
2288 int partial;
2290 partial = FUNCTION_ARG_PARTIAL_NREGS (all->args_so_far,
2291 data->promoted_mode,
2292 data->passed_type,
2293 data->named_arg);
2294 data->partial = partial;
2296 /* The caller might already have allocated stack space for the
2297 register parameters. */
2298 if (partial != 0 && all->reg_parm_stack_space == 0)
2300 /* Part of this argument is passed in registers and part
2301 is passed on the stack. Ask the prologue code to extend
2302 the stack part so that we can recreate the full value.
2304 PRETEND_BYTES is the size of the registers we need to store.
2305 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2306 stack space that the prologue should allocate.
2308 Internally, gcc assumes that the argument pointer is aligned
2309 to STACK_BOUNDARY bits. This is used both for alignment
2310 optimizations (see init_emit) and to locate arguments that are
2311 aligned to more than PARM_BOUNDARY bits. We must preserve this
2312 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2313 a stack boundary. */
2315 /* We assume at most one partial arg, and it must be the first
2316 argument on the stack. */
2317 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2319 pretend_bytes = partial * UNITS_PER_WORD;
2320 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2322 /* We want to align relative to the actual stack pointer, so
2323 don't include this in the stack size until later. */
2324 all->extra_pretend_bytes = all->pretend_args_size;
2328 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2329 entry_parm ? data->partial : 0, current_function_decl,
2330 &all->stack_args_size, &data->locate);
2332 /* Adjust offsets to include the pretend args. */
2333 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2334 data->locate.slot_offset.constant += pretend_bytes;
2335 data->locate.offset.constant += pretend_bytes;
2337 data->entry_parm = entry_parm;
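/* Worked example of the partial-argument bookkeeping above, under
   assumed target parameters: suppose UNITS_PER_WORD == 4,
   STACK_BYTES == 16, and the first stack argument has partial == 1.
   Then pretend_bytes = 1 * 4 = 4 and
   all->pretend_args_size = CEIL_ROUND (4, 16) = 16, so the prologue
   allocates 16 pretend bytes, preserving the STACK_BOUNDARY alignment
   of the argument pointer described above.  */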
2340 /* A subroutine of assign_parms. If there is actually space on the stack
2341 for this parm, count it in stack_args_size and return true. */
2343 static bool
2344 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2345 struct assign_parm_data_one *data)
2347 /* Trivially true if we've no incoming register. */
2348 if (data->entry_parm == NULL)
2350 /* Also true if we're partially in registers and partially not,
2351 since we've arranged to drop the entire argument on the stack. */
2352 else if (data->partial != 0)
2354 /* Also true if the target says that it's passed in both registers
2355 and on the stack. */
2356 else if (GET_CODE (data->entry_parm) == PARALLEL
2357 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2359 /* Also true if the target says that there's stack allocated for
2360 all register parameters. */
2361 else if (all->reg_parm_stack_space > 0)
2363 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2364 else
2365 return false;
2367 all->stack_args_size.constant += data->locate.size.constant;
2368 if (data->locate.size.var)
2369 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2371 return true;
2374 /* A subroutine of assign_parms. Given that this parameter is allocated
2375 stack space by the ABI, find it. */
2377 static void
2378 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2380 rtx offset_rtx, stack_parm;
2381 unsigned int align, boundary;
2383 /* If we're passing this arg using a reg, make its stack home the
2384 aligned stack slot. */
2385 if (data->entry_parm)
2386 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2387 else
2388 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2390 stack_parm = current_function_internal_arg_pointer;
2391 if (offset_rtx != const0_rtx)
2392 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2393 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2395 set_mem_attributes (stack_parm, parm, 1);
2397 boundary = FUNCTION_ARG_BOUNDARY (data->promoted_mode, data->passed_type);
2398 align = 0;
2400 /* If we're padding upward, we know that the alignment of the slot
2401 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2402 intentionally forcing upward padding. Otherwise we have to come
2403 up with a guess at the alignment based on OFFSET_RTX. */
2404 if (data->locate.where_pad == upward || data->entry_parm)
2405 align = boundary;
2406 else if (GET_CODE (offset_rtx) == CONST_INT)
2408 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2409 align = align & -align;
2411 if (align > 0)
2412 set_mem_align (stack_parm, align);
2414 if (data->entry_parm)
2415 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2417 data->stack_parm = stack_parm;
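/* Worked example of the alignment guess above (values assumed): with
   INTVAL (offset_rtx) == 4, BITS_PER_UNIT == 8 and boundary == 64:

     align = (4 * 8) | 64 = 96      (binary 1100000)
     align & -align       = 32      (lowest set bit)

   The guess is the lowest set bit of (offset-in-bits | boundary): a slot
   4 bytes into the args can only be assumed 32-bit aligned even when the
   boundary itself is 64.  */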
2420 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2421 always valid and contiguous. */
2423 static void
2424 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2426 rtx entry_parm = data->entry_parm;
2427 rtx stack_parm = data->stack_parm;
2429 /* If this parm was passed part in regs and part in memory, pretend it
2430 arrived entirely in memory by pushing the register-part onto the stack.
2431 In the special case of a DImode or DFmode that is split, we could put
2432 it together in a pseudoreg directly, but for now that's not worth
2433 bothering with. */
2434 if (data->partial != 0)
2436 /* Handle calls that pass values in multiple non-contiguous
2437 locations. The Irix 6 ABI has examples of this. */
2438 if (GET_CODE (entry_parm) == PARALLEL)
2439 emit_group_store (validize_mem (stack_parm), entry_parm,
2440 data->passed_type,
2441 int_size_in_bytes (data->passed_type));
2442 else
2443 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2444 data->partial);
2446 entry_parm = stack_parm;
2449 /* If we didn't decide this parm came in a register, by default it came
2450 on the stack. */
2451 else if (entry_parm == NULL)
2452 entry_parm = stack_parm;
2454 /* When an argument is passed in multiple locations, we can't make use
2455 of this information, but we can save some copying if the whole argument
2456 is passed in a single register. */
2457 else if (GET_CODE (entry_parm) == PARALLEL
2458 && data->nominal_mode != BLKmode
2459 && data->passed_mode != BLKmode)
2461 size_t i, len = XVECLEN (entry_parm, 0);
2463 for (i = 0; i < len; i++)
2464 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2465 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2466 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2467 == data->passed_mode)
2468 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2470 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2471 break;
2475 data->entry_parm = entry_parm;
2478 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2479 always valid and properly aligned. */
2482 static void
2483 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2485 rtx stack_parm = data->stack_parm;
2487 /* If we can't trust the parm stack slot to be aligned enough for its
2488 ultimate type, don't use that slot after entry. We'll make another
2489 stack slot, if we need one. */
2490 if (STRICT_ALIGNMENT && stack_parm
2491 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2492 stack_parm = NULL;
2494 /* If parm was passed in memory, and we need to convert it on entry,
2495 don't store it back in that same slot. */
2496 else if (data->entry_parm == stack_parm
2497 && data->nominal_mode != BLKmode
2498 && data->nominal_mode != data->passed_mode)
2499 stack_parm = NULL;
2501 data->stack_parm = stack_parm;
2504 /* A subroutine of assign_parms. Return true if the current parameter
2505 should be stored as a BLKmode in the current frame. */
2507 static bool
2508 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2510 if (data->nominal_mode == BLKmode)
2511 return true;
2512 if (GET_CODE (data->entry_parm) == PARALLEL)
2513 return true;
2515 #ifdef BLOCK_REG_PADDING
2516 if (data->locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
2517 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD)
2518 return true;
2519 #endif
2521 return false;
2524 /* A subroutine of assign_parms. Arrange for the parameter to be
2525 present and valid in DATA->STACK_RTL. */
2527 static void
2528 assign_parm_setup_block (tree parm, struct assign_parm_data_one *data)
2530 rtx entry_parm = data->entry_parm;
2531 rtx stack_parm = data->stack_parm;
2533 /* If we've a non-block object that's nevertheless passed in parts,
2534 reconstitute it in register operations rather than on the stack. */
2535 if (GET_CODE (entry_parm) == PARALLEL
2536 && data->nominal_mode != BLKmode
2537 && XVECLEN (entry_parm, 0) > 1
2538 && use_register_for_decl (parm))
2540 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2542 emit_group_store (parmreg, entry_parm, data->nominal_type,
2543 int_size_in_bytes (data->nominal_type));
2544 SET_DECL_RTL (parm, parmreg);
2545 return;
2548 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2549 calls that pass values in multiple non-contiguous locations. */
2550 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2552 HOST_WIDE_INT size = int_size_in_bytes (data->passed_type);
2553 HOST_WIDE_INT size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2554 rtx mem;
2556 /* Note that we will be storing an integral number of words.
2557 So we have to be careful to ensure that we allocate an
2558 integral number of words. We do this below in the
2559 assign_stack_local if space was not allocated in the argument
2560 list. If it was, this will not work if PARM_BOUNDARY is not
2561 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2562 if it becomes a problem. Exception is when BLKmode arrives
2563 with arguments not conforming to word_mode. */
2565 if (stack_parm == 0)
2567 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
2568 data->stack_parm = stack_parm;
2569 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2570 set_mem_attributes (stack_parm, parm, 1);
2572 else if (GET_CODE (entry_parm) == PARALLEL)
2574 else
2575 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2577 mem = validize_mem (stack_parm);
2579 /* Handle values in multiple non-contiguous locations. */
2580 if (GET_CODE (entry_parm) == PARALLEL)
2581 emit_group_store (mem, entry_parm, data->passed_type, size);
2583 else if (size == 0)
2586 /* If SIZE is that of a mode no bigger than a word, just use
2587 that mode's store operation. */
2588 else if (size <= UNITS_PER_WORD)
2590 enum machine_mode mode
2591 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2593 if (mode != BLKmode
2594 #ifdef BLOCK_REG_PADDING
2595 && (size == UNITS_PER_WORD
2596 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2597 != (BYTES_BIG_ENDIAN ? upward : downward)))
2598 #endif
2601 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2602 emit_move_insn (change_address (mem, mode, 0), reg);
2605 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2606 machine must be aligned to the left before storing
2607 to memory. Note that the previous test doesn't
2608 handle all cases (e.g. SIZE == 3). */
2609 else if (size != UNITS_PER_WORD
2610 #ifdef BLOCK_REG_PADDING
2611 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2612 == downward)
2613 #else
2614 && BYTES_BIG_ENDIAN
2615 #endif
2618 rtx tem, x;
2619 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2620 rtx reg = gen_rtx_REG (word_mode, REGNO (data->entry_parm));
2622 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2623 build_int_cst (NULL_TREE, by),
2624 NULL_RTX, 1);
2625 tem = change_address (mem, word_mode, 0);
2626 emit_move_insn (tem, x);
2628 else
2629 move_block_from_reg (REGNO (data->entry_parm), mem,
2630 size_stored / UNITS_PER_WORD);
2632 else
2633 move_block_from_reg (REGNO (data->entry_parm), mem,
2634 size_stored / UNITS_PER_WORD);
2637 SET_DECL_RTL (parm, stack_parm);
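/* Worked example of the left-justifying shift above (big-endian, assumed
   parameters): with UNITS_PER_WORD == 4 and a 3-byte BLKmode value in a
   register, by = (4 - 3) * 8 = 8; shifting left by 8 bits moves the value
   to the most-significant bytes of the word, matching where the first
   3 bytes of a word-sized memory object live on a big-endian target.  */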
2640 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2641 parameter. Get it there. Perform all ABI specified conversions. */
2643 static void
2644 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2645 struct assign_parm_data_one *data)
2647 rtx parmreg;
2648 enum machine_mode promoted_nominal_mode;
2649 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2650 bool did_conversion = false;
2652 /* Store the parm in a pseudoregister during the function, but we may
2653 need to do it in a wider mode. */
2655 promoted_nominal_mode
2656 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2658 parmreg = gen_reg_rtx (promoted_nominal_mode);
2660 if (!DECL_ARTIFICIAL (parm))
2661 mark_user_reg (parmreg);
2663 /* If this was an item that we received a pointer to,
2664 set DECL_RTL appropriately. */
2665 if (data->passed_pointer)
2667 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2668 set_mem_attributes (x, parm, 1);
2669 SET_DECL_RTL (parm, x);
2671 else
2672 SET_DECL_RTL (parm, parmreg);
2674 /* Copy the value into the register. */
2675 if (data->nominal_mode != data->passed_mode
2676 || promoted_nominal_mode != data->promoted_mode)
2678 int save_tree_used;
2680 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2681 mode, by the caller. We now have to convert it to
2682 NOMINAL_MODE, if different. However, PARMREG may be in
2683 a different mode than NOMINAL_MODE if it is being stored
2684 promoted.
2686 If ENTRY_PARM is a hard register, it might be in a register
2687 not valid for operating in its mode (e.g., an odd-numbered
2688 register for a DFmode). In that case, moves are the only
2689 thing valid, so we can't do a convert from there. This
2690 occurs when the calling sequence allows such misaligned
2691 usages.
2693 In addition, the conversion may involve a call, which could
2694 clobber parameters which haven't been copied to pseudo
2695 registers yet. Therefore, we must first copy the parm to
2696 a pseudo reg here, and save the conversion until after all
2697 parameters have been moved. */
2699 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2701 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2703 push_to_sequence (all->conversion_insns);
2704 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2706 if (GET_CODE (tempreg) == SUBREG
2707 && GET_MODE (tempreg) == data->nominal_mode
2708 && REG_P (SUBREG_REG (tempreg))
2709 && data->nominal_mode == data->passed_mode
2710 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2711 && GET_MODE_SIZE (GET_MODE (tempreg))
2712 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2714 /* The argument is already sign/zero extended, so note it
2715 into the subreg. */
2716 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2717 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2720 /* TREE_USED gets set erroneously during expand_assignment. */
2721 save_tree_used = TREE_USED (parm);
2722 expand_assignment (parm, make_tree (data->nominal_type, tempreg), 0);
2723 TREE_USED (parm) = save_tree_used;
2724 all->conversion_insns = get_insns ();
2725 end_sequence ();
2727 did_conversion = true;
2729 else
2730 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2732 /* If we were passed a pointer but the actual value can safely live
2733 in a register, put it in one. */
2734 if (data->passed_pointer
2735 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2736 /* If by-reference argument was promoted, demote it. */
2737 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2738 || use_register_for_decl (parm)))
2740 /* We can't use nominal_mode, because it will have been set to
2741 Pmode above. We must use the actual mode of the parm. */
2742 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2743 mark_user_reg (parmreg);
2745 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2747 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2748 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2750 push_to_sequence (all->conversion_insns);
2751 emit_move_insn (tempreg, DECL_RTL (parm));
2752 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2753 emit_move_insn (parmreg, tempreg);
2754 all->conversion_insns = get_insns ();
2755 end_sequence ();
2757 did_conversion = true;
2759 else
2760 emit_move_insn (parmreg, DECL_RTL (parm));
2762 SET_DECL_RTL (parm, parmreg);
2764 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2765 now the parm. */
2766 data->stack_parm = NULL;
2769 /* If we are passed an arg by reference and it is our responsibility
2770 to make a copy, do it now.
2771 PASSED_TYPE and PASSED mode now refer to the pointer, not the
2772 original argument, so we must recreate them in the call to
2773 FUNCTION_ARG_CALLEE_COPIES. */
2774 /* ??? Later add code to skip the copy when the argument isn't
2775 modified. */
2777 else if (data->passed_pointer)
2779 tree type = TREE_TYPE (data->passed_type);
2781 if (reference_callee_copied (&all->args_so_far, TYPE_MODE (type),
2782 type, data->named_arg))
2784 rtx copy;
2786 /* This sequence may involve a library call perhaps clobbering
2787 registers that haven't been copied to pseudos yet. */
2789 push_to_sequence (all->conversion_insns);
2791 if (!COMPLETE_TYPE_P (type)
2792 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2794 /* This is a variable-sized object. */
2795 copy = allocate_dynamic_stack_space (expr_size (parm), NULL_RTX,
2796 TYPE_ALIGN (type));
2797 copy = gen_rtx_MEM (BLKmode, copy);
2799 else
2800 copy = assign_stack_temp (TYPE_MODE (type),
2801 int_size_in_bytes (type), 1);
2802 set_mem_attributes (copy, parm, 1);
2804 store_expr (parm, copy, 0);
2805 emit_move_insn (parmreg, XEXP (copy, 0));
2806 all->conversion_insns = get_insns ();
2807 end_sequence ();
2809 did_conversion = true;
2813 /* Mark the register as eliminable if we did no conversion and it was
2814 copied from memory at a fixed offset, and the arg pointer was not
2815 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2816 offset formed an invalid address, such memory-equivalences as we
2817 make here would screw up life analysis for it. */
2818 if (data->nominal_mode == data->passed_mode
2819 && !did_conversion
2820 && data->stack_parm != 0
2821 && MEM_P (data->stack_parm)
2822 && data->locate.offset.var == 0
2823 && reg_mentioned_p (virtual_incoming_args_rtx,
2824 XEXP (data->stack_parm, 0)))
2826 rtx linsn = get_last_insn ();
2827 rtx sinsn, set;
2829 /* Mark complex types separately. */
2830 if (GET_CODE (parmreg) == CONCAT)
2832 enum machine_mode submode
2833 = GET_MODE_INNER (GET_MODE (parmreg));
2834 int regnor = REGNO (gen_realpart (submode, parmreg));
2835 int regnoi = REGNO (gen_imagpart (submode, parmreg));
2836 rtx stackr = gen_realpart (submode, data->stack_parm);
2837 rtx stacki = gen_imagpart (submode, data->stack_parm);
2839 /* Scan backwards for the set of the real and
2840 imaginary parts. */
2841 for (sinsn = linsn; sinsn != 0;
2842 sinsn = prev_nonnote_insn (sinsn))
2844 set = single_set (sinsn);
2845 if (set == 0)
2846 continue;
2848 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2849 REG_NOTES (sinsn)
2850 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2851 REG_NOTES (sinsn));
2852 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2853 REG_NOTES (sinsn)
2854 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2855 REG_NOTES (sinsn));
2858 else if ((set = single_set (linsn)) != 0
2859 && SET_DEST (set) == parmreg)
2860 REG_NOTES (linsn)
2861 = gen_rtx_EXPR_LIST (REG_EQUIV,
2862 data->stack_parm, REG_NOTES (linsn));
2865 /* For pointer data type, suggest pointer register. */
2866 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2867 mark_reg_pointer (parmreg,
2868 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2871 /* A subroutine of assign_parms. Allocate stack space to hold the current
2872 parameter. Get it there. Perform all ABI specified conversions. */
2874 static void
2875 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2876 struct assign_parm_data_one *data)
2878 /* Value must be stored in the stack slot STACK_PARM during function
2879 execution. */
2881 if (data->promoted_mode != data->nominal_mode)
2883 /* Conversion is required. */
2884 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2886 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2888 push_to_sequence (all->conversion_insns);
2889 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2890 TYPE_UNSIGNED (TREE_TYPE (parm)));
2892 if (data->stack_parm)
2893 /* ??? This may need a big-endian conversion on sparc64. */
2894 data->stack_parm
2895 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2897 all->conversion_insns = get_insns ();
2898 end_sequence ();
2901 if (data->entry_parm != data->stack_parm)
2903 if (data->stack_parm == 0)
2905 data->stack_parm
2906 = assign_stack_local (GET_MODE (data->entry_parm),
2907 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2908 0);
2909 set_mem_attributes (data->stack_parm, parm, 1);
2912 if (data->promoted_mode != data->nominal_mode)
2914 push_to_sequence (all->conversion_insns);
2915 emit_move_insn (validize_mem (data->stack_parm),
2916 validize_mem (data->entry_parm));
2917 all->conversion_insns = get_insns ();
2918 end_sequence ();
2920 else
2921 emit_move_insn (validize_mem (data->stack_parm),
2922 validize_mem (data->entry_parm));
2925 SET_DECL_RTL (parm, data->stack_parm);
2928 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2929 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2931 static void
2932 assign_parms_unsplit_complex (tree orig_fnargs, tree fnargs)
2934 tree parm;
2936 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2938 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2939 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2941 rtx tmp, real, imag;
2942 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2944 real = DECL_RTL (fnargs);
2945 imag = DECL_RTL (TREE_CHAIN (fnargs));
2946 if (inner != GET_MODE (real))
2948 real = gen_lowpart_SUBREG (inner, real);
2949 imag = gen_lowpart_SUBREG (inner, imag);
2951 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2952 SET_DECL_RTL (parm, tmp);
2954 real = DECL_INCOMING_RTL (fnargs);
2955 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2956 if (inner != GET_MODE (real))
2958 real = gen_lowpart_SUBREG (inner, real);
2959 imag = gen_lowpart_SUBREG (inner, imag);
2961 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2962 set_decl_incoming_rtl (parm, tmp);
2963 fnargs = TREE_CHAIN (fnargs);
2965 else
2967 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2968 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2970 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2971 instead of the copy of decl, i.e. FNARGS. */
2972 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2973 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2976 fnargs = TREE_CHAIN (fnargs);
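/* Hedged illustration of the recombination above: for '_Complex double z'
   split into two double halves whose DECL_RTLs are registers R0 and R1,
   the original parm gets back a single
     (concat:DC R0 R1)
   so the rest of expansion can treat z as one complex value again.  */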
2980 /* Assign RTL expressions to the function's parameters. This may involve
2981 copying them into registers and using those registers as the DECL_RTL. */
2983 void
2984 assign_parms (tree fndecl)
2986 struct assign_parm_data_all all;
2987 tree fnargs, parm;
2988 rtx internal_arg_pointer;
2989 int varargs_setup = 0;
2991 /* If the reg that the virtual arg pointer will be translated into is
2992 not a fixed reg or is the stack pointer, make a copy of the virtual
2993 arg pointer, and address parms via the copy. The frame pointer is
2994 considered fixed even though it is not marked as such.
2996 The second time through, simply use ap to avoid generating rtx. */
2998 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2999 || ! (fixed_regs[ARG_POINTER_REGNUM]
3000 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3001 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3002 else
3003 internal_arg_pointer = virtual_incoming_args_rtx;
3004 current_function_internal_arg_pointer = internal_arg_pointer;
3006 assign_parms_initialize_all (&all);
3007 fnargs = assign_parms_augmented_arg_list (&all);
3009 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3011 struct assign_parm_data_one data;
3013 /* Extract the type of PARM; adjust it according to ABI. */
3014 assign_parm_find_data_types (&all, parm, &data);
3016 /* Early out for errors and void parameters. */
3017 if (data.passed_mode == VOIDmode)
3019 SET_DECL_RTL (parm, const0_rtx);
3020 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3021 continue;
3024 /* Handle stdargs. LAST_NAMED is a slight misnomer; it's also true
3025 for the unnamed dummy argument following the last named argument.
3026 See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
3027 we only want to do this when we get to the actual last named
3028 argument, which will be the first time LAST_NAMED gets set. */
3029 if (data.last_named && !varargs_setup)
3031 varargs_setup = true;
3032 assign_parms_setup_varargs (&all, &data, false);
3035 /* Find out where the parameter arrives in this function. */
3036 assign_parm_find_entry_rtl (&all, &data);
3038 /* Find out where stack space for this parameter might be. */
3039 if (assign_parm_is_stack_parm (&all, &data))
3041 assign_parm_find_stack_rtl (parm, &data);
3042 assign_parm_adjust_entry_rtl (&data);
3045 /* Record permanently how this parm was passed. */
3046 set_decl_incoming_rtl (parm, data.entry_parm);
3048 /* Update info on where next arg arrives in registers. */
3049 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3050 data.passed_type, data.named_arg);
3052 assign_parm_adjust_stack_rtl (&data);
3054 if (assign_parm_setup_block_p (&data))
3055 assign_parm_setup_block (parm, &data);
3056 else if (data.passed_pointer || use_register_for_decl (parm))
3057 assign_parm_setup_reg (&all, parm, &data);
3058 else
3059 assign_parm_setup_stack (&all, parm, &data);
3062 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3063 assign_parms_unsplit_complex (all.orig_fnargs, fnargs);
3065 /* Output all parameter conversion instructions (possibly including calls)
3066 now that all parameters have been copied out of hard registers. */
3067 emit_insn (all.conversion_insns);
3069 /* If we are receiving a struct value address as the first argument, set up
3070 the RTL for the function result. As this might require code to convert
3071 the transmitted address to Pmode, we do this here to ensure that possible
3072 preliminary conversions of the address have been emitted already. */
3073 if (all.function_result_decl)
3075 tree result = DECL_RESULT (current_function_decl);
3076 rtx addr = DECL_RTL (all.function_result_decl);
3077 rtx x;
3079 if (DECL_BY_REFERENCE (result))
3080 x = addr;
3081 else
3083 addr = convert_memory_address (Pmode, addr);
3084 x = gen_rtx_MEM (DECL_MODE (result), addr);
3085 set_mem_attributes (x, result, 1);
3087 SET_DECL_RTL (result, x);
3090 /* We have aligned all the args, so add space for the pretend args. */
3091 current_function_pretend_args_size = all.pretend_args_size;
3092 all.stack_args_size.constant += all.extra_pretend_bytes;
3093 current_function_args_size = all.stack_args_size.constant;
3095 /* Adjust function incoming argument size for alignment and
3096 minimum length. */
3098 #ifdef REG_PARM_STACK_SPACE
3099 current_function_args_size = MAX (current_function_args_size,
3100 REG_PARM_STACK_SPACE (fndecl));
3101 #endif
3103 current_function_args_size
3104 = ((current_function_args_size + STACK_BYTES - 1)
3105 / STACK_BYTES) * STACK_BYTES;
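/* E.g. with STACK_BYTES == 8 and 13 bytes of incoming args (hypothetical
   numbers): (13 + 8 - 1) / 8 * 8 == 16, rounding the argument block up
   to a stack-boundary multiple.  */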
3107 #ifdef ARGS_GROW_DOWNWARD
3108 current_function_arg_offset_rtx
3109 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3110 : expand_expr (size_diffop (all.stack_args_size.var,
3111 size_int (-all.stack_args_size.constant)),
3112 NULL_RTX, VOIDmode, 0));
3113 #else
3114 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3115 #endif
3117 /* See how many bytes, if any, of its args a function should try to pop
3118 on return. */
3120 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3121 current_function_args_size);
3123 /* For a stdarg.h function, save info about
3124 regs and stack space used by the named args. */
3126 current_function_args_info = all.args_so_far;
3128 /* Set the rtx used for the function return value. Put this in its
3129 own variable so any optimizers that need this information don't have
3130 to include tree.h. Do this here so it gets done when an inlined
3131 function gets output. */
3133 current_function_return_rtx
3134 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3135 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3137 /* If scalar return value was computed in a pseudo-reg, or was a named
3138 return value that got dumped to the stack, copy that to the hard
3139 return register. */
3140 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3142 tree decl_result = DECL_RESULT (fndecl);
3143 rtx decl_rtl = DECL_RTL (decl_result);
3145 if (REG_P (decl_rtl)
3146 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3147 : DECL_REGISTER (decl_result))
3149 rtx real_decl_rtl;
3151 #ifdef FUNCTION_OUTGOING_VALUE
3152 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3153 fndecl);
3154 #else
3155 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3156 fndecl);
3157 #endif
3158 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3159 /* The delay slot scheduler assumes that current_function_return_rtx
3160 holds the hard register containing the return value, not a
3161 temporary pseudo. */
3162 current_function_return_rtx = real_decl_rtl;
3167 /* Indicate whether REGNO is an incoming argument to the current function
3168 that was promoted to a wider mode. If so, return the RTX for the
3169 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3170 that REGNO is promoted from and whether the promotion was signed or
3171 unsigned. */
3173 rtx
3174 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3176 tree arg;
3178 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3179 arg = TREE_CHAIN (arg))
3180 if (REG_P (DECL_INCOMING_RTL (arg))
3181 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3182 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3184 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3185 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3187 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3188 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3189 && mode != DECL_MODE (arg))
3191 *pmode = DECL_MODE (arg);
3192 *punsignedp = unsignedp;
3193 return DECL_INCOMING_RTL (arg);
3197 return 0;
3201 /* Compute the size and offset from the start of the stacked arguments for a
3202 parm passed in mode PASSED_MODE and with type TYPE.
3204 INITIAL_OFFSET_PTR points to the current offset into the stacked
3205 arguments.
3207 The starting offset and size for this parm are returned in
3208 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3209 nonzero, the offset is that of stack slot, which is returned in
3210 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3211 padding required from the initial offset ptr to the stack slot.
3213 IN_REGS is nonzero if the argument will be passed in registers. It will
3214 never be set if REG_PARM_STACK_SPACE is not defined.
3216 FNDECL is the function in which the argument was defined.
3218 There are two types of rounding that are done. The first, controlled by
3219 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3220 list to be aligned to the specific boundary (in bits). This rounding
3221 affects the initial and starting offsets, but not the argument size.
3223 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3224 optionally rounds the size of the parm to PARM_BOUNDARY. The
3225 initial offset is not affected by this rounding, while the size always
3226 is and the starting offset may be. */
3228 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3229 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3230 callers pass in the total size of args so far as
3231 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3233 void
3234 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3235 int partial, tree fndecl ATTRIBUTE_UNUSED,
3236 struct args_size *initial_offset_ptr,
3237 struct locate_and_pad_arg_data *locate)
3239 tree sizetree;
3240 enum direction where_pad;
3241 int boundary;
3242 int reg_parm_stack_space = 0;
3243 int part_size_in_regs;
3245 #ifdef REG_PARM_STACK_SPACE
3246 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3248 /* If we have found a stack parm before we reach the end of the
3249 area reserved for registers, skip that area. */
3250 if (! in_regs)
3252 if (reg_parm_stack_space > 0)
3254 if (initial_offset_ptr->var)
3256 initial_offset_ptr->var
3257 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3258 ssize_int (reg_parm_stack_space));
3259 initial_offset_ptr->constant = 0;
3261 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3262 initial_offset_ptr->constant = reg_parm_stack_space;
3265 #endif /* REG_PARM_STACK_SPACE */
3267 part_size_in_regs = 0;
3268 if (reg_parm_stack_space == 0)
3269 part_size_in_regs = ((partial * UNITS_PER_WORD)
3270 / (PARM_BOUNDARY / BITS_PER_UNIT)
3271 * (PARM_BOUNDARY / BITS_PER_UNIT));
3273 sizetree
3274 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3275 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3276 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3277 locate->where_pad = where_pad;
3279 #ifdef ARGS_GROW_DOWNWARD
3280 locate->slot_offset.constant = -initial_offset_ptr->constant;
3281 if (initial_offset_ptr->var)
3282 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3283 initial_offset_ptr->var);
3286 tree s2 = sizetree;
3287 if (where_pad != none
3288 && (!host_integerp (sizetree, 1)
3289 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3290 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3291 SUB_PARM_SIZE (locate->slot_offset, s2);
3294 locate->slot_offset.constant += part_size_in_regs;
3296 if (!in_regs
3297 #ifdef REG_PARM_STACK_SPACE
3298 || REG_PARM_STACK_SPACE (fndecl) > 0
3299 #endif
3301 pad_to_arg_alignment (&locate->slot_offset, boundary,
3302 &locate->alignment_pad);
3304 locate->size.constant = (-initial_offset_ptr->constant
3305 - locate->slot_offset.constant);
3306 if (initial_offset_ptr->var)
3307 locate->size.var = size_binop (MINUS_EXPR,
3308 size_binop (MINUS_EXPR,
3309 ssize_int (0),
3310 initial_offset_ptr->var),
3311 locate->slot_offset.var);
3313 /* Pad_below needs the pre-rounded size to know how much to pad
3314 below. */
3315 locate->offset = locate->slot_offset;
3316 if (where_pad == downward)
3317 pad_below (&locate->offset, passed_mode, sizetree);
3319 #else /* !ARGS_GROW_DOWNWARD */
3320 if (!in_regs
3321 #ifdef REG_PARM_STACK_SPACE
3322 || REG_PARM_STACK_SPACE (fndecl) > 0
3323 #endif
3325 pad_to_arg_alignment (initial_offset_ptr, boundary,
3326 &locate->alignment_pad);
3327 locate->slot_offset = *initial_offset_ptr;
3329 #ifdef PUSH_ROUNDING
3330 if (passed_mode != BLKmode)
3331 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3332 #endif
3334 /* Pad_below needs the pre-rounded size to know how much to pad below
3335 so this must be done before rounding up. */
3336 locate->offset = locate->slot_offset;
3337 if (where_pad == downward)
3338 pad_below (&locate->offset, passed_mode, sizetree);
3340 if (where_pad != none
3341 && (!host_integerp (sizetree, 1)
3342 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3343 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3345 ADD_PARM_SIZE (locate->size, sizetree);
3347 locate->size.constant -= part_size_in_regs;
3348 #endif /* ARGS_GROW_DOWNWARD */
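/* Worked example (!ARGS_GROW_DOWNWARD, hypothetical values): assume
   STACK_POINTER_OFFSET == 0, no PUSH_ROUNDING, PARM_BOUNDARY == boundary
   == 32, in_regs == 0, an initial offset of 6, and a 2-byte HImode
   argument padded downward.  pad_to_arg_alignment rounds 6 up to 8, so
   slot_offset = 8; pad_below then adds 4 - 2 = 2, so offset = 10; the
   size is rounded up to 4.  The slot spans bytes 8..11 and the value
   itself occupies bytes 10..11.  */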
3351 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3352 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3354 static void
3355 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3356 struct args_size *alignment_pad)
3358 tree save_var = NULL_TREE;
3359 HOST_WIDE_INT save_constant = 0;
3360 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3361 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3363 #ifdef SPARC_STACK_BOUNDARY_HACK
3364 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3365 higher than the real alignment of %sp. However, when it does this,
3366 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3367 This is a temporary hack while the sparc port is fixed. */
3368 if (SPARC_STACK_BOUNDARY_HACK)
3369 sp_offset = 0;
3370 #endif
3372 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3374 save_var = offset_ptr->var;
3375 save_constant = offset_ptr->constant;
3378 alignment_pad->var = NULL_TREE;
3379 alignment_pad->constant = 0;
3381 if (boundary > BITS_PER_UNIT)
3383 if (offset_ptr->var)
3385 tree sp_offset_tree = ssize_int (sp_offset);
3386 tree offset = size_binop (PLUS_EXPR,
3387 ARGS_SIZE_TREE (*offset_ptr),
3388 sp_offset_tree);
3389 #ifdef ARGS_GROW_DOWNWARD
3390 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3391 #else
3392 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3393 #endif
3395 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3396 /* ARGS_SIZE_TREE includes constant term. */
3397 offset_ptr->constant = 0;
3398 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3399 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3400 save_var);
3402 else
3404 offset_ptr->constant = -sp_offset +
3405 #ifdef ARGS_GROW_DOWNWARD
3406 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3407 #else
3408 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3409 #endif
3410 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3411 alignment_pad->constant = offset_ptr->constant - save_constant;
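/* Worked example of the constant branch above (values assumed):
   boundary == 64 so boundary_in_bytes == 8; STACK_POINTER_OFFSET == 4;
   offset_ptr->constant == 10.  CEIL_ROUND (10 + 4, 8) == 16, so the new
   constant is -4 + 16 == 12: the slot's absolute address %sp + 12 is
   8-byte aligned given that %sp + 4 is, which is the invariant the
   sp_offset correction preserves.  */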
3416 static void
3417 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3419 if (passed_mode != BLKmode)
3421 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3422 offset_ptr->constant
3423 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3424 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3425 - GET_MODE_SIZE (passed_mode));
3427 else
3429 if (TREE_CODE (sizetree) != INTEGER_CST
3430 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3432 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3433 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3434 /* Add it in. */
3435 ADD_PARM_SIZE (*offset_ptr, s2);
3436 SUB_PARM_SIZE (*offset_ptr, sizetree);
3441 /* Walk the tree of blocks describing the binding levels within a function
3442 and warn about variables that might be killed by setjmp or vfork.
3443 This is done after calling flow_analysis and before global_alloc
3444 clobbers the pseudo-regs to hard regs. */
3446 void
3447 setjmp_vars_warning (tree block)
3449 tree decl, sub;
3451 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3453 if (TREE_CODE (decl) == VAR_DECL
3454 && DECL_RTL_SET_P (decl)
3455 && REG_P (DECL_RTL (decl))
3456 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3457 warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
3458 decl, decl);
3461 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3462 setjmp_vars_warning (sub);
3465 /* Do the appropriate part of setjmp_vars_warning
3466 but for arguments instead of local variables. */
3468 void
3469 setjmp_args_warning (void)
3471 tree decl;
3472 for (decl = DECL_ARGUMENTS (current_function_decl);
3473 decl; decl = TREE_CHAIN (decl))
3474 if (DECL_RTL (decl) != 0
3475 && REG_P (DECL_RTL (decl))
3476 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3477 warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
3478 decl, decl);
3482 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3483 and create duplicate blocks. */
3484 /* ??? Need an option to either create block fragments or to create
3485 abstract origin duplicates of a source block. It really depends
3486 on what optimization has been performed. */
3488 void
3489 reorder_blocks (void)
3491 tree block = DECL_INITIAL (current_function_decl);
3492 varray_type block_stack;
3494 if (block == NULL_TREE)
3495 return;
3497 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
3499 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3500 clear_block_marks (block);
3502 /* Prune the old trees away, so that they don't get in the way. */
3503 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3504 BLOCK_CHAIN (block) = NULL_TREE;
3506 /* Recreate the block tree from the note nesting. */
3507 reorder_blocks_1 (get_insns (), block, &block_stack);
3508 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3510 /* Remove deleted blocks from the block fragment chains. */
3511 reorder_fix_fragments (block);
3514 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3516 void
3517 clear_block_marks (tree block)
3519 while (block)
3521 TREE_ASM_WRITTEN (block) = 0;
3522 clear_block_marks (BLOCK_SUBBLOCKS (block));
3523 block = BLOCK_CHAIN (block);
3527 static void
3528 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
3530 rtx insn;
3532 for (insn = insns; insn; insn = NEXT_INSN (insn))
3534 if (NOTE_P (insn))
3536 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3538 tree block = NOTE_BLOCK (insn);
3540 /* If we have seen this block before, that means it now
3541 spans multiple address regions. Create a new fragment. */
3542 if (TREE_ASM_WRITTEN (block))
3544 tree new_block = copy_node (block);
3545 tree origin;
3547 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3548 ? BLOCK_FRAGMENT_ORIGIN (block)
3549 : block);
3550 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3551 BLOCK_FRAGMENT_CHAIN (new_block)
3552 = BLOCK_FRAGMENT_CHAIN (origin);
3553 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3555 NOTE_BLOCK (insn) = new_block;
3556 block = new_block;
3559 BLOCK_SUBBLOCKS (block) = 0;
3560 TREE_ASM_WRITTEN (block) = 1;
3561 /* When there's only one block for the entire function,
3562 current_block == block and we mustn't do this; it
3563 will cause infinite recursion. */
3564 if (block != current_block)
3566 BLOCK_SUPERCONTEXT (block) = current_block;
3567 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3568 BLOCK_SUBBLOCKS (current_block) = block;
3569 current_block = block;
3571 VARRAY_PUSH_TREE (*p_block_stack, block);
3573 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3575 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
3576 VARRAY_POP (*p_block_stack);
3577 BLOCK_SUBBLOCKS (current_block)
3578 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3579 current_block = BLOCK_SUPERCONTEXT (current_block);
3585 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3586 appears in the block tree, select one of the fragments to become
3587 the new origin block. */
3589 static void
3590 reorder_fix_fragments (tree block)
3592 while (block)
3594 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3595 tree new_origin = NULL_TREE;
3597 if (dup_origin)
3599 if (! TREE_ASM_WRITTEN (dup_origin))
3601 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3603 /* Find the first of the remaining fragments. There must
3604 be at least one -- the current block. */
3605 while (! TREE_ASM_WRITTEN (new_origin))
3606 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3607 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3610 else if (! dup_origin)
3611 new_origin = block;
3613 /* Re-root the rest of the fragments to the new origin. In the
3614 case that DUP_ORIGIN was null, that means BLOCK was the origin
3615 of a chain of fragments and we want to remove those fragments
3616 that didn't make it to the output. */
3617 if (new_origin)
3619 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3620 tree chain = *pp;
3622 while (chain)
3624 if (TREE_ASM_WRITTEN (chain))
3626 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3627 *pp = chain;
3628 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3630 chain = BLOCK_FRAGMENT_CHAIN (chain);
3632 *pp = NULL_TREE;
3635 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3636 block = BLOCK_CHAIN (block);
3640 /* Reverse the order of elements in the chain T of blocks,
3641 and return the new head of the chain (old last element). */
3643 tree
3644 blocks_nreverse (tree t)
3646 tree prev = 0, decl, next;
3647 for (decl = t; decl; decl = next)
3649 next = BLOCK_CHAIN (decl);
3650 BLOCK_CHAIN (decl) = prev;
3651 prev = decl;
3653 return prev;
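/* Editor's illustration: given the chain A -> B -> C linked through
   BLOCK_CHAIN, blocks_nreverse rewrites the links in place and returns C,
   yielding C -> B -> A.  The caller must use the returned head; the old
   head A now terminates the chain.  */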
3656 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3657 non-NULL, list them all into VECTOR, in a depth-first preorder
3658 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3659 blocks. */
3661 static int
3662 all_blocks (tree block, tree *vector)
3664 int n_blocks = 0;
3666 while (block)
3668 TREE_ASM_WRITTEN (block) = 0;
3670 /* Record this block. */
3671 if (vector)
3672 vector[n_blocks] = block;
3674 ++n_blocks;
3676 /* Record the subblocks, and their subblocks... */
3677 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3678 vector ? vector + n_blocks : 0);
3679 block = BLOCK_CHAIN (block);
3682 return n_blocks;
3685 /* Return a vector containing all the blocks rooted at BLOCK. The
3686 number of elements in the vector is stored in N_BLOCKS_P. The
3687 vector is dynamically allocated; it is the caller's responsibility
3688 to call `free' on the pointer returned. */
3690 static tree *
3691 get_block_vector (tree block, int *n_blocks_p)
3693 tree *block_vector;
3695 *n_blocks_p = all_blocks (block, NULL);
3696 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
3697 all_blocks (block, block_vector);
3699 return block_vector;
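/* Editor's sketch of typical usage; `examine_block' is a hypothetical
   callback, not a function in this file:

     int n_blocks, i;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     for (i = 0; i < n_blocks; i++)
       examine_block (vec[i]);
     free (vec);  */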
3702 static GTY(()) int next_block_index = 2;
3704 /* Set BLOCK_NUMBER for all the blocks in FN. */
3706 void
3707 number_blocks (tree fn)
3709 int i;
3710 int n_blocks;
3711 tree *block_vector;
3713 /* For SDB and XCOFF debugging output, we start numbering the blocks
3714 from 1 within each function, rather than keeping a running
3715 count. */
3716 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3717 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3718 next_block_index = 1;
3719 #endif
3721 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3723 /* The top-level BLOCK isn't numbered at all. */
3724 for (i = 1; i < n_blocks; ++i)
3725 /* We number the blocks consecutively from next_block_index,
3725 which normally starts at two (one for SDB/XCOFF; see above). */
3726 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3728 free (block_vector);
3730 return;
3733 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3735 tree
3736 debug_find_var_in_block_tree (tree var, tree block)
3738 tree t;
3740 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3741 if (t == var)
3742 return block;
3744 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3746 tree ret = debug_find_var_in_block_tree (var, t);
3747 if (ret)
3748 return ret;
3751 return NULL_TREE;
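/* Editor's note: like the other debug_* helpers, this is presumably meant
   to be called by hand from the debugger, e.g.

     (gdb) call debug_find_var_in_block_tree (var, DECL_INITIAL (fndecl))

   It returns the first BLOCK found in a preorder walk whose BLOCK_VARS
   list contains VAR, or NULL_TREE if there is none.  */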
3754 /* Allocate a function structure for FNDECL and set its contents
3755 to the defaults. */
3757 void
3758 allocate_struct_function (tree fndecl)
3760 tree result;
3761 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3763 cfun = ggc_alloc_cleared (sizeof (struct function));
3765 cfun->stack_alignment_needed = STACK_BOUNDARY;
3766 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3768 current_function_funcdef_no = funcdef_no++;
3770 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3772 init_eh_for_function ();
3774 lang_hooks.function.init (cfun);
3775 if (init_machine_status)
3776 cfun->machine = (*init_machine_status) ();
3778 if (fndecl == NULL)
3779 return;
3781 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3782 cfun->decl = fndecl;
3784 result = DECL_RESULT (fndecl);
3785 if (aggregate_value_p (result, fndecl))
3787 #ifdef PCC_STATIC_STRUCT_RETURN
3788 current_function_returns_pcc_struct = 1;
3789 #endif
3790 current_function_returns_struct = 1;
3793 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3795 current_function_stdarg
3796 = (fntype
3797 && TYPE_ARG_TYPES (fntype) != 0
3798 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3799 != void_type_node));
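/* Editor's illustration of the stdarg test above: for `int f (int, ...)'
   the TYPE_ARG_TYPES list does not end in void_type_node, so
   current_function_stdarg is set; for the prototype `int g (int)' the
   list ends in void_type_node and it is not; an unprototyped `int h ()'
   has TYPE_ARG_TYPES == 0 and is likewise not treated as stdarg.  */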
3802 /* Reset cfun and other non-struct-function variables to defaults as
3803 appropriate for emitting rtl at the start of a function. */
3805 static void
3806 prepare_function_start (tree fndecl)
3808 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3809 cfun = DECL_STRUCT_FUNCTION (fndecl);
3810 else
3811 allocate_struct_function (fndecl);
3812 init_emit ();
3813 init_varasm_status (cfun);
3814 init_expr ();
3816 cse_not_expected = ! optimize;
3818 /* Caller save not needed yet. */
3819 caller_save_needed = 0;
3821 /* We haven't done register allocation yet. */
3822 reg_renumber = 0;
3824 /* Indicate that we have not instantiated virtual registers yet. */
3825 virtuals_instantiated = 0;
3827 /* Indicate that we want CONCATs now. */
3828 generating_concat_p = 1;
3830 /* Indicate we have no need of a frame pointer yet. */
3831 frame_pointer_needed = 0;
3834 /* Initialize the rtl expansion mechanism so that we can do simple things
3835 like generate sequences. This is used to provide a context during global
3836 initialization of some passes. */
3837 void
3838 init_dummy_function_start (void)
3840 prepare_function_start (NULL);
3843 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3844 and initialize static variables for generating RTL for the statements
3845 of the function. */
3847 void
3848 init_function_start (tree subr)
3850 prepare_function_start (subr);
3852 /* Prevent ever trying to delete the first instruction of a
3853 function. Also tell final how to output a linenum before the
3854 function prologue. Note linenums could be missing, e.g. when
3855 compiling a Java .class file. */
3856 if (! DECL_IS_BUILTIN (subr))
3857 emit_line_note (DECL_SOURCE_LOCATION (subr));
3859 /* Make sure first insn is a note even if we don't want linenums.
3860 This makes sure the first insn will never be deleted.
3861 Also, final expects a note to appear there. */
3862 emit_note (NOTE_INSN_DELETED);
3864 /* Warn if this value is an aggregate type,
3865 regardless of which calling convention we are using for it. */
3866 if (warn_aggregate_return
3867 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3868 warning ("function returns an aggregate");
3871 /* Make sure all values used by the optimization passes have sane
3872 defaults. */
3873 void
3874 init_function_for_compilation (void)
3876 reg_renumber = 0;
3878 /* No prologue/epilogue insns yet. */
3879 VARRAY_GROW (prologue, 0);
3880 VARRAY_GROW (epilogue, 0);
3881 VARRAY_GROW (sibcall_epilogue, 0);
3884 /* Expand a call to __main at the beginning of a possible main function. */
3886 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
3887 #undef HAS_INIT_SECTION
3888 #define HAS_INIT_SECTION
3889 #endif
3891 void
3892 expand_main_function (void)
3894 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
3895 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
3897 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
3898 rtx tmp, seq;
3900 start_sequence ();
3901 /* Forcibly align the stack. */
3902 #ifdef STACK_GROWS_DOWNWARD
3903 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
3904 stack_pointer_rtx, 1, OPTAB_WIDEN);
3905 #else
3906 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3907 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
3908 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
3909 stack_pointer_rtx, 1, OPTAB_WIDEN);
3910 #endif
3911 if (tmp != stack_pointer_rtx)
3912 emit_move_insn (stack_pointer_rtx, tmp);
3914 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
3915 tmp = force_reg (Pmode, const0_rtx);
3916 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
3917 seq = get_insns ();
3918 end_sequence ();
3920 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
3921 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
3922 break;
3923 if (tmp)
3924 emit_insn_before (seq, tmp);
3925 else
3926 emit_insn (seq);
3928 #endif
3930 #ifndef HAS_INIT_SECTION
3931 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3932 #endif
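/* Editor's worked example for the stack-alignment masking above, assuming
   align == 16: on a downward-growing stack, sp = 0x1007 AND -16 yields
   0x1000, rounding the pointer down so it stays inside the allocated
   region; on an upward-growing stack we first add align - 1
   (0x1007 + 15 = 0x1016) and then mask, yielding 0x1010, i.e. rounding
   up.  */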
3935 /* The PENDING_SIZES represent the sizes of variable-sized types.
3936 Create RTL for the various sizes now (using temporary variables),
3937 so that we can refer to the sizes from the RTL we are generating
3938 for the current function. The PENDING_SIZES are a TREE_LIST. The
3939 TREE_VALUE of each node is a SAVE_EXPR. */
3941 void
3942 expand_pending_sizes (tree pending_sizes)
3944 tree tem;
3946 /* Evaluate now the sizes of any types declared among the arguments. */
3947 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
3948 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
3951 /* Start the RTL for a new function, and set variables used for
3952 emitting RTL.
3953 SUBR is the FUNCTION_DECL node. */
3957 void
3958 expand_function_start (tree subr)
3960 /* Make sure volatile mem refs aren't considered
3961 valid operands of arithmetic insns. */
3962 init_recog_no_volatile ();
3964 current_function_profile
3965 = (profile_flag
3966 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
3968 current_function_limit_stack
3969 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
3971 /* Make the label for return statements to jump to. Do not special
3972 case machines with special return instructions -- they will be
3973 handled later during jump, ifcvt, or epilogue creation. */
3974 return_label = gen_label_rtx ();
3976 /* Initialize rtx used to return the value. */
3977 /* Do this before assign_parms so that we copy the struct value address
3978 before any library calls that assign parms might generate. */
3980 /* Decide whether to return the value in memory or in a register. */
3981 if (aggregate_value_p (DECL_RESULT (subr), subr))
3983 /* Returning something that won't go in a register. */
3984 rtx value_address = 0;
3986 #ifdef PCC_STATIC_STRUCT_RETURN
3987 if (current_function_returns_pcc_struct)
3989 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3990 value_address = assemble_static_space (size);
3992 else
3993 #endif
3995 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
3996 /* Expect to be passed the address of a place to store the value.
3997 If it is passed as an argument, assign_parms will take care of
3998 it. */
3999 if (sv)
4001 value_address = gen_reg_rtx (Pmode);
4002 emit_move_insn (value_address, sv);
4005 if (value_address)
4007 rtx x = value_address;
4008 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4010 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4011 set_mem_attributes (x, DECL_RESULT (subr), 1);
4013 SET_DECL_RTL (DECL_RESULT (subr), x);
4016 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4017 /* If return mode is void, this decl rtl should not be used. */
4018 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4019 else
4021 /* Compute the return values into a pseudo reg, which we will copy
4022 into the true return register after the cleanups are done. */
4024 /* In order to figure out what mode to use for the pseudo, we
4025 figure out what the mode of the eventual return register will
4026 actually be, and use that. */
4027 rtx hard_reg
4028 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
4029 subr, 1);
4031 /* Structures that are returned in registers are not aggregate_value_p,
4032 so we may see a PARALLEL or a REG. */
4033 if (REG_P (hard_reg))
4034 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
4035 else
4037 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4038 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4041 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4042 result to the real return register(s). */
4043 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4046 /* Initialize rtx for parameters and local variables.
4047 In some cases this requires emitting insns. */
4048 assign_parms (subr);
4050 /* If function gets a static chain arg, store it. */
4051 if (cfun->static_chain_decl)
4053 tree parm = cfun->static_chain_decl;
4054 rtx local = gen_reg_rtx (Pmode);
4056 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4057 SET_DECL_RTL (parm, local);
4058 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4060 emit_move_insn (local, static_chain_incoming_rtx);
4063 /* If the function receives a non-local goto, then store the
4064 bits we need to restore the frame pointer. */
4065 if (cfun->nonlocal_goto_save_area)
4067 tree t_save;
4068 rtx r_save;
4070 /* ??? We need to do this save early. Unfortunately, this point is
4071 reached before the frame variable gets declared. Help out... */
4072 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4074 t_save = build4 (ARRAY_REF, ptr_type_node,
4075 cfun->nonlocal_goto_save_area,
4076 integer_zero_node, NULL_TREE, NULL_TREE);
4077 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4078 r_save = convert_memory_address (Pmode, r_save);
4080 emit_move_insn (r_save, virtual_stack_vars_rtx);
4081 update_nonlocal_goto_save_area ();
4084 /* The following was moved from init_function_start.
4085 The move is supposed to make sdb output more accurate. */
4086 /* Indicate the beginning of the function body,
4087 as opposed to parm setup. */
4088 emit_note (NOTE_INSN_FUNCTION_BEG);
4090 if (!NOTE_P (get_last_insn ()))
4091 emit_note (NOTE_INSN_DELETED);
4092 parm_birth_insn = get_last_insn ();
4094 if (current_function_profile)
4096 #ifdef PROFILE_HOOK
4097 PROFILE_HOOK (current_function_funcdef_no);
4098 #endif
4101 /* The tail-recursion label, if we end up needing one, should go after
4102 the display initializations. Ensure we have a NOTE here since some
4103 things (like trampolines) get placed before this. */
4104 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4106 /* Evaluate now the sizes of any types declared among the arguments. */
4107 expand_pending_sizes (nreverse (get_pending_sizes ()));
4109 /* Make sure there is a line number after the function entry setup code. */
4110 force_next_line_note ();
4113 /* Undo the effects of init_dummy_function_start. */
4114 void
4115 expand_dummy_function_end (void)
4117 /* End any sequences that failed to be closed due to syntax errors. */
4118 while (in_sequence_p ())
4119 end_sequence ();
4121 /* Outside function body, can't compute type's actual size
4122 until next function's body starts. */
4124 free_after_parsing (cfun);
4125 free_after_compilation (cfun);
4126 cfun = 0;
4129 /* Call DOIT for each hard register used as a return value from
4130 the current function. */
4132 void
4133 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4135 rtx outgoing = current_function_return_rtx;
4137 if (! outgoing)
4138 return;
4140 if (REG_P (outgoing))
4141 (*doit) (outgoing, arg);
4142 else if (GET_CODE (outgoing) == PARALLEL)
4144 int i;
4146 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4148 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4150 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4151 (*doit) (x, arg);
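/* Editor's illustration: for a value returned in one hard register,
   current_function_return_rtx might be (reg:SI 0) and DOIT is called once
   on it.  For a value spread over several registers it is a PARALLEL,
   schematically

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   and DOIT is called once per hard-register entry.  */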
4156 static void
4157 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4159 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4162 void
4163 clobber_return_register (void)
4165 diddle_return_value (do_clobber_return_reg, NULL);
4167 /* In case we do use pseudo to return value, clobber it too. */
4168 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4170 tree decl_result = DECL_RESULT (current_function_decl);
4171 rtx decl_rtl = DECL_RTL (decl_result);
4172 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4174 do_clobber_return_reg (decl_rtl, NULL);
4179 static void
4180 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4182 emit_insn (gen_rtx_USE (VOIDmode, reg));
4185 void
4186 use_return_register (void)
4188 diddle_return_value (do_use_return_reg, NULL);
4191 /* Possibly warn about unused parameters. */
4192 void
4193 do_warn_unused_parameter (tree fn)
4195 tree decl;
4197 for (decl = DECL_ARGUMENTS (fn);
4198 decl; decl = TREE_CHAIN (decl))
4199 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4200 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4201 warning ("%Junused parameter '%D'", decl, decl);
4204 static GTY(()) rtx initial_trampoline;
4206 /* Generate RTL for the end of the current function. */
4208 void
4209 expand_function_end (void)
4211 rtx clobber_after;
4213 /* If arg_pointer_save_area was referenced only from a nested
4214 function, we will not have initialized it yet. Do that now. */
4215 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4216 get_arg_pointer_save_area (cfun);
4218 /* If we are doing stack checking and this function makes calls,
4219 do a stack probe at the start of the function to ensure we have enough
4220 space for another stack frame. */
4221 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4223 rtx insn, seq;
4225 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4226 if (CALL_P (insn))
4228 start_sequence ();
4229 probe_stack_range (STACK_CHECK_PROTECT,
4230 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4231 seq = get_insns ();
4232 end_sequence ();
4233 emit_insn_before (seq, tail_recursion_reentry);
4234 break;
4238 /* Possibly warn about unused parameters.
4239 When the frontend does unit-at-a-time, the warning is already
4240 issued at finalization time. */
4241 if (warn_unused_parameter
4242 && !lang_hooks.callgraph.expand_function)
4243 do_warn_unused_parameter (current_function_decl);
4245 /* End any sequences that failed to be closed due to syntax errors. */
4246 while (in_sequence_p ())
4247 end_sequence ();
4249 clear_pending_stack_adjust ();
4250 do_pending_stack_adjust ();
4252 /* @@@ This is a kludge. We want to ensure that instructions that
4253 may trap are not moved into the epilogue by scheduling, because
4254 we don't always emit unwind information for the epilogue.
4255 However, not all machine descriptions define a blockage insn, so
4256 emit an ASM_INPUT to act as one. */
4257 if (flag_non_call_exceptions)
4258 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4260 /* Mark the end of the function body.
4261 If control reaches this insn, the function can drop through
4262 without returning a value. */
4263 emit_note (NOTE_INSN_FUNCTION_END);
4265 /* Must mark the last line number note in the function, so that the test
4266 coverage code can avoid counting the last line twice. This just tells
4267 the code to ignore the immediately following line note, since there
4268 already exists a copy of this note somewhere above. This line number
4269 note is still needed for debugging though, so we can't delete it. */
4270 if (flag_test_coverage)
4271 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4273 /* Output a linenumber for the end of the function.
4274 SDB depends on this. */
4275 force_next_line_note ();
4276 emit_line_note (input_location);
4278 /* Before the return label (if any), clobber the return
4279 registers so that they are not propagated live to the rest of
4280 the function. This can only happen with functions that drop
4281 through; if there had been a return statement, there would
4282 have either been a return rtx, or a jump to the return label.
4284 We delay the actual code generation until after
4285 current_function_return_rtx is computed. */
4286 clobber_after = get_last_insn ();
4288 /* Output the label for the actual return from the function,
4289 if one is expected. This happens either because a function epilogue
4290 is used instead of a return instruction, or because a return was done
4291 with a goto in order to run local cleanups, or because of pcc-style
4292 structure returning. */
4293 if (return_label)
4294 emit_label (return_label);
4296 /* Let except.c know where it should emit the call to unregister
4297 the function context for sjlj exceptions. */
4298 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4299 sjlj_emit_function_exit_after (get_last_insn ());
4301 /* If we had calls to alloca, and this machine needs
4302 an accurate stack pointer to exit the function,
4303 insert some code to save and restore the stack pointer. */
4304 if (! EXIT_IGNORE_STACK
4305 && current_function_calls_alloca)
4307 rtx tem = 0;
4309 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4310 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4313 /* If scalar return value was computed in a pseudo-reg, or was a named
4314 return value that got dumped to the stack, copy that to the hard
4315 return register. */
4316 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4318 tree decl_result = DECL_RESULT (current_function_decl);
4319 rtx decl_rtl = DECL_RTL (decl_result);
4321 if (REG_P (decl_rtl)
4322 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4323 : DECL_REGISTER (decl_result))
4325 rtx real_decl_rtl = current_function_return_rtx;
4327 /* This should be set in assign_parms. */
4328 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4330 /* If this is a BLKmode structure being returned in registers,
4331 then use the mode computed in expand_return. Note that if
4332 decl_rtl is memory, then its mode may have been changed,
4333 but that current_function_return_rtx has not. */
4334 if (GET_MODE (real_decl_rtl) == BLKmode)
4335 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4337 /* If a named return value dumped decl_return to memory, then
4338 we may need to re-do the PROMOTE_MODE signed/unsigned
4339 extension. */
4340 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4342 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4344 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4345 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4346 &unsignedp, 1);
4348 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4350 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4352 /* If expand_function_start has created a PARALLEL for decl_rtl,
4353 move the result to the real return registers. Otherwise, do
4354 a group load from decl_rtl for a named return. */
4355 if (GET_CODE (decl_rtl) == PARALLEL)
4356 emit_group_move (real_decl_rtl, decl_rtl);
4357 else
4358 emit_group_load (real_decl_rtl, decl_rtl,
4359 TREE_TYPE (decl_result),
4360 int_size_in_bytes (TREE_TYPE (decl_result)));
4362 else
4363 emit_move_insn (real_decl_rtl, decl_rtl);
4367 /* If returning a structure, arrange to return the address of the value
4368 in a place where debuggers expect to find it.
4370 If returning a structure PCC style,
4371 the caller also depends on this value.
4372 And current_function_returns_pcc_struct is not necessarily set. */
4373 if (current_function_returns_struct
4374 || current_function_returns_pcc_struct)
4376 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4377 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4378 rtx outgoing;
4380 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4381 type = TREE_TYPE (type);
4382 else
4383 value_address = XEXP (value_address, 0);
4385 #ifdef FUNCTION_OUTGOING_VALUE
4386 outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4387 current_function_decl);
4388 #else
4389 outgoing = FUNCTION_VALUE (build_pointer_type (type),
4390 current_function_decl);
4391 #endif
4393 /* Mark this as a function return value so integrate will delete the
4394 assignment and USE below when inlining this function. */
4395 REG_FUNCTION_VALUE_P (outgoing) = 1;
4397 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4398 value_address = convert_memory_address (GET_MODE (outgoing),
4399 value_address);
4401 emit_move_insn (outgoing, value_address);
4403 /* Show the return register used to hold the result (in this case
4404 the address of the result). */
4405 current_function_return_rtx = outgoing;
4408 /* If this is an implementation of throw, do what's necessary to
4409 communicate between __builtin_eh_return and the epilogue. */
4410 expand_eh_return ();
4412 /* Emit the actual code to clobber return register. */
4414 rtx seq;
4416 start_sequence ();
4417 clobber_return_register ();
4418 expand_naked_return ();
4419 seq = get_insns ();
4420 end_sequence ();
4422 emit_insn_after (seq, clobber_after);
4425 /* Output the label for the naked return from the function. */
4426 emit_label (naked_return_label);
4428 /* ??? This should no longer be necessary since the stupid register
4429 allocator is no longer with us, but some parts of the compiler
4430 (e.g. reload_combine and the sh mach_dep_reorg) still try to compute
4431 their own lifetime info instead of using the general framework. */
4432 use_return_register ();
4435 rtx
4436 get_arg_pointer_save_area (struct function *f)
4438 rtx ret = f->x_arg_pointer_save_area;
4440 if (! ret)
4442 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4443 f->x_arg_pointer_save_area = ret;
4446 if (f == cfun && ! f->arg_pointer_save_area_init)
4448 rtx seq;
4450 /* Save the arg pointer at the beginning of the function. The
4451 generated stack slot may not be a valid memory address, so we
4452 have to check it and fix it if necessary. */
4453 start_sequence ();
4454 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4455 seq = get_insns ();
4456 end_sequence ();
4458 push_topmost_sequence ();
4459 emit_insn_after (seq, get_insns ());
4460 pop_topmost_sequence ();
4463 return ret;
4466 /* Extend a vector that records the INSN_UIDs of INSNS
4467 (a list of one or more insns). */
4469 static void
4470 record_insns (rtx insns, varray_type *vecp)
4472 int i, len;
4473 rtx tmp;
4475 tmp = insns;
4476 len = 0;
4477 while (tmp != NULL_RTX)
4479 len++;
4480 tmp = NEXT_INSN (tmp);
4483 i = VARRAY_SIZE (*vecp);
4484 VARRAY_GROW (*vecp, i + len);
4485 tmp = insns;
4486 while (tmp != NULL_RTX)
4488 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
4489 i++;
4490 tmp = NEXT_INSN (tmp);
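/* Editor's note: these varrays record INSN_UIDs, not the insns
   themselves; `contains' below is the matching query.  For instance,
   after

     record_insns (seq, &prologue);

   prologue_epilogue_contains returns nonzero for each insn of SEQ, and
   for SEQUENCEs wrapping them once reorg has run.  */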
4494 /* Set the locator of the insn chain starting at INSN to LOC. */
4495 static void
4496 set_insn_locators (rtx insn, int loc)
4498 while (insn != NULL_RTX)
4500 if (INSN_P (insn))
4501 INSN_LOCATOR (insn) = loc;
4502 insn = NEXT_INSN (insn);
4506 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4507 be running after reorg, SEQUENCE rtl is possible. */
4509 static int
4510 contains (rtx insn, varray_type vec)
4512 int i, j;
4514 if (NONJUMP_INSN_P (insn)
4515 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4517 int count = 0;
4518 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4519 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4520 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
4521 count++;
4522 return count;
4524 else
4526 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4527 if (INSN_UID (insn) == VARRAY_INT (vec, j))
4528 return 1;
4530 return 0;
4533 int
4534 prologue_epilogue_contains (rtx insn)
4536 if (contains (insn, prologue))
4537 return 1;
4538 if (contains (insn, epilogue))
4539 return 1;
4540 return 0;
4543 int
4544 sibcall_epilogue_contains (rtx insn)
4546 if (sibcall_epilogue)
4547 return contains (insn, sibcall_epilogue);
4548 return 0;
4551 #ifdef HAVE_return
4552 /* Insert gen_return at the end of block BB. This also means updating
4553 block_for_insn appropriately. */
4555 static void
4556 emit_return_into_block (basic_block bb, rtx line_note)
4558 emit_jump_insn_after (gen_return (), BB_END (bb));
4559 if (line_note)
4560 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4562 #endif /* HAVE_return */
4564 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4566 /* These functions convert the epilogue into a variant that does not modify the
4567 stack pointer. This is used in cases where a function returns an object
4568 whose size is not known until it is computed. The called function leaves the
4569 object on the stack, leaves the stack depressed, and returns a pointer to
4570 the object.
4572 What we need to do is track all modifications and references to the stack
4573 pointer, deleting the modifications and changing the references to point to
4574 the location the stack pointer would have pointed to had the modifications
4575 taken place.
4577 These functions need to be portable so we need to make as few assumptions
4578 about the epilogue as we can. However, the epilogue basically contains
4579 three things: instructions to reset the stack pointer, instructions to
4580 reload registers, possibly including the frame pointer, and an
4581 instruction to return to the caller.
4583 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4584 We also make no attempt to validate the insns we make since if they are
4585 invalid, we probably can't do anything valid. The intent is that these
4586 routines get "smarter" as more and more machines start to use them and
4587 they try operating on different epilogues.
4589 We use the following structure to track what the part of the epilogue that
4590 we've already processed has done. We keep two copies of the SP equivalence,
4591 one for use during the insn we are processing and one for use in the next
4592 insn. The difference is because one part of a PARALLEL may adjust SP
4593 and the other may use it. */
4595 struct epi_info
4597 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4598 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4599 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4600 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4601 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4602 should be set to once we no longer need
4603 its value. */
4604 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4605 for registers. */
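/* Editor's worked example: after processing
   (set sp (plus fp (const_int 16))) we would have sp_equiv_reg == fp and
   sp_offset == 16, so a later reference such as
   (mem (plus sp (const_int 4))) can be rewritten as
   (mem (plus fp (const_int 20))) even though the SP modification itself
   is deleted.  */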
4608 static void handle_epilogue_set (rtx, struct epi_info *);
4609 static void update_epilogue_consts (rtx, rtx, void *);
4610 static void emit_equiv_load (struct epi_info *);
4612 /* Modify INSNS, a list of one or more insns forming part of the epilogue,
4613 so that it makes no modifications to the stack pointer. Return the new list of insns. */
4615 static rtx
4616 keep_stack_depressed (rtx insns)
4618 int j;
4619 struct epi_info info;
4620 rtx insn, next;
4622 /* If the epilogue is just a single instruction, it must be OK as is. */
4623 if (NEXT_INSN (insns) == NULL_RTX)
4624 return insns;
4626 /* Otherwise, start a sequence, initialize the information we have, and
4627 process all the insns we were given. */
4628 start_sequence ();
4630 info.sp_equiv_reg = stack_pointer_rtx;
4631 info.sp_offset = 0;
4632 info.equiv_reg_src = 0;
4634 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4635 info.const_equiv[j] = 0;
4637 insn = insns;
4638 next = NULL_RTX;
4639 while (insn != NULL_RTX)
4641 next = NEXT_INSN (insn);
4643 if (!INSN_P (insn))
4645 add_insn (insn);
4646 insn = next;
4647 continue;
4650 /* If this insn references the register that SP is equivalent to and
4651 we have a pending load to that register, we must force out the load
4652 first and then indicate we no longer know what SP's equivalent is. */
4653 if (info.equiv_reg_src != 0
4654 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4656 emit_equiv_load (&info);
4657 info.sp_equiv_reg = 0;
4660 info.new_sp_equiv_reg = info.sp_equiv_reg;
4661 info.new_sp_offset = info.sp_offset;
4663 /* If this is a (RETURN) and the return address is on the stack,
4664 update the address and change to an indirect jump. */
4665 if (GET_CODE (PATTERN (insn)) == RETURN
4666 || (GET_CODE (PATTERN (insn)) == PARALLEL
4667 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4669 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4670 rtx base = 0;
4671 HOST_WIDE_INT offset = 0;
4672 rtx jump_insn, jump_set;
4674 /* If the return address is in a register, we can emit the insn
4675 unchanged. Otherwise, it must be a MEM and we see what the
4676 base register and offset are. In any case, we have to emit any
4677 pending load to the equivalent reg of SP, if any. */
4678 if (REG_P (retaddr))
4680 emit_equiv_load (&info);
4681 add_insn (insn);
4682 insn = next;
4683 continue;
4685 else
4687 rtx ret_ptr;
4688 gcc_assert (MEM_P (retaddr));
4690 ret_ptr = XEXP (retaddr, 0);
4692 if (REG_P (ret_ptr))
4694 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4695 offset = 0;
4697 else
4699 gcc_assert (GET_CODE (ret_ptr) == PLUS
4700 && REG_P (XEXP (ret_ptr, 0))
4701 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4702 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4703 offset = INTVAL (XEXP (ret_ptr, 1));
4707 /* If the base of the location containing the return pointer
4708 is SP, we must update it with the replacement address. Otherwise,
4709 just build the necessary MEM. */
4710 retaddr = plus_constant (base, offset);
4711 if (base == stack_pointer_rtx)
4712 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4713 plus_constant (info.sp_equiv_reg,
4714 info.sp_offset));
4716 retaddr = gen_rtx_MEM (Pmode, retaddr);
4718 /* If there is a pending load to the equivalent register for SP
4719 and we reference that register, we must load our address into
4720 a scratch register and then do that load. */
4721 if (info.equiv_reg_src
4722 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4724 unsigned int regno;
4725 rtx reg;
4727 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4728 if (HARD_REGNO_MODE_OK (regno, Pmode)
4729 && !fixed_regs[regno]
4730 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4731 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
4732 regno)
4733 && !refers_to_regno_p (regno,
4734 regno + hard_regno_nregs[regno]
4735 [Pmode],
4736 info.equiv_reg_src, NULL)
4737 && info.const_equiv[regno] == 0)
4738 break;
4740 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4742 reg = gen_rtx_REG (Pmode, regno);
4743 emit_move_insn (reg, retaddr);
4744 retaddr = reg;
4747 emit_equiv_load (&info);
4748 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4750 /* Show the SET in the above insn is a RETURN. */
4751 jump_set = single_set (jump_insn);
4752 gcc_assert (jump_set);
4753 SET_IS_RETURN_P (jump_set) = 1;
4756 /* If SP is not mentioned in the pattern and its equivalent register, if
4757 any, is not modified, just emit it. Otherwise, if neither is set,
4758 replace the reference to SP and emit the insn. If none of those are
4759 true, handle each SET individually. */
4760 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4761 && (info.sp_equiv_reg == stack_pointer_rtx
4762 || !reg_set_p (info.sp_equiv_reg, insn)))
4763 add_insn (insn);
4764 else if (! reg_set_p (stack_pointer_rtx, insn)
4765 && (info.sp_equiv_reg == stack_pointer_rtx
4766 || !reg_set_p (info.sp_equiv_reg, insn)))
4768 int changed;
4770 changed = validate_replace_rtx (stack_pointer_rtx,
4771 plus_constant (info.sp_equiv_reg,
4772 info.sp_offset),
4773 insn);
4774 gcc_assert (changed);
4776 add_insn (insn);
4778 else if (GET_CODE (PATTERN (insn)) == SET)
4779 handle_epilogue_set (PATTERN (insn), &info);
4780 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4782 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4783 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4784 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4786 else
4787 add_insn (insn);
4789 info.sp_equiv_reg = info.new_sp_equiv_reg;
4790 info.sp_offset = info.new_sp_offset;
4792 /* Now update any constants this insn sets. */
4793 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4794 insn = next;
4797 insns = get_insns ();
4798 end_sequence ();
4799 return insns;
4802 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4803 structure that contains information about what we've seen so far. We
4804 process this SET by either updating that data or by emitting one or
4805 more insns. */
4807 static void
4808 handle_epilogue_set (rtx set, struct epi_info *p)
4810 /* First handle the case where we are setting SP. Record what it is being
4811 set from. If unknown, abort. */
4812 if (reg_set_p (stack_pointer_rtx, set))
4814 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4816 if (GET_CODE (SET_SRC (set)) == PLUS)
4818 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4819 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4820 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4821 else
4823 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4824 && (REGNO (XEXP (SET_SRC (set), 1))
4825 < FIRST_PSEUDO_REGISTER)
4826 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4827 p->new_sp_offset
4828 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4831 else
4832 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4834 /* If we are adjusting SP, we adjust from the old data. */
4835 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4837 p->new_sp_equiv_reg = p->sp_equiv_reg;
4838 p->new_sp_offset += p->sp_offset;
4841 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4843 return;
4846 /* Next handle the case where we are setting SP's equivalent register.
4847 If we already have a value to set it to, abort. We could update, but
4848 there seems little point in handling that case. Note that we have
4849 to allow for the case where we are setting the register set in
4850 the previous part of a PARALLEL inside a single insn. But use the
4851 old offset for any updates within this insn. We must allow for the case
4852 where the register is being set in a different (usually wider) mode than
4853 Pmode. */
4854 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4856 gcc_assert (!p->equiv_reg_src
4857 && REG_P (p->new_sp_equiv_reg)
4858 && REG_P (SET_DEST (set))
4859 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4860 <= BITS_PER_WORD)
4861 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4862 p->equiv_reg_src
4863 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4864 plus_constant (p->sp_equiv_reg,
4865 p->sp_offset));
4868 /* Otherwise, replace any references to SP in the insn to its new value
4869 and emit the insn. */
4870 else
4872 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4873 plus_constant (p->sp_equiv_reg,
4874 p->sp_offset));
4875 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4876 plus_constant (p->sp_equiv_reg,
4877 p->sp_offset));
4878 emit_insn (set);
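/* Editor's illustration of the SP case above: given the common adjustment
   (set sp (plus sp (const_int 32))) while sp_equiv_reg == fp and
   sp_offset == 16, we compute new_sp_equiv_reg == fp and
   new_sp_offset == 48; the SET itself is never emitted.  */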
4882 /* Update the tracking information for registers set to constants. */
4884 static void
4885 update_epilogue_consts (rtx dest, rtx x, void *data)
4887 struct epi_info *p = (struct epi_info *) data;
4888 rtx new;
4890 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4891 return;
4893 /* If we are either clobbering a register or doing a partial set,
4894 show we don't know the value. */
4895 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
4896 p->const_equiv[REGNO (dest)] = 0;
4898 /* If we are setting it to a constant, record that constant. */
4899 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
4900 p->const_equiv[REGNO (dest)] = SET_SRC (x);
4902 /* If this is a binary operation between a register we have been tracking
4903 and a constant, see if we can compute a new constant value. */
4904 else if (ARITHMETIC_P (SET_SRC (x))
4905 && REG_P (XEXP (SET_SRC (x), 0))
4906 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
4907 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
4908 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4909 && 0 != (new = simplify_binary_operation
4910 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
4911 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
4912 XEXP (SET_SRC (x), 1)))
4913 && GET_CODE (new) == CONST_INT)
4914 p->const_equiv[REGNO (dest)] = new;
4916 /* Otherwise, we can't do anything with this value. */
4917 else
4918 p->const_equiv[REGNO (dest)] = 0;
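/* Editor's illustration: after (set (reg 3) (const_int 8)) we record
   const_equiv[3] == (const_int 8); a subsequent
   (set (reg 3) (plus (reg 3) (const_int 4))) folds through
   simplify_binary_operation to const_equiv[3] == (const_int 12); a
   CLOBBER or partial set of register 3 resets the entry to 0.  */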
4921 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
4923 static void
4924 emit_equiv_load (struct epi_info *p)
4926 if (p->equiv_reg_src != 0)
4928 rtx dest = p->sp_equiv_reg;
4930 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
4931 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
4932 REGNO (p->sp_equiv_reg));
4934 emit_move_insn (dest, p->equiv_reg_src);
4935 p->equiv_reg_src = 0;
4938 #endif
4940 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4941 this into place with notes indicating where the prologue ends and where
4942 the epilogue begins. Update the basic block information when possible. */
4944 void
4945 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
4947 int inserted = 0;
4948 edge e;
4949 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4950 rtx seq;
4951 #endif
4952 #ifdef HAVE_prologue
4953 rtx prologue_end = NULL_RTX;
4954 #endif
4955 #if defined (HAVE_epilogue) || defined(HAVE_return)
4956 rtx epilogue_end = NULL_RTX;
4957 #endif
4958 #ifdef HAVE_return
4959 unsigned ix;
4960 #endif
4961 edge_iterator ei;
4963 #ifdef HAVE_prologue
4964 if (HAVE_prologue)
4966 start_sequence ();
4967 seq = gen_prologue ();
4968 emit_insn (seq);
4970 /* Retain a map of the prologue insns. */
4971 record_insns (seq, &prologue);
4972 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
4974 seq = get_insns ();
4975 end_sequence ();
4976 set_insn_locators (seq, prologue_locator);
4978 /* Can't deal with multiple successors of the entry block
4979 at the moment. Function should always have at least one
4980 entry point. */
4981 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4983 insert_insn_on_edge (seq, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
4984 inserted = 1;
4986 #endif
4988 /* If the exit block has no non-fake predecessors, we don't need
4989 an epilogue. */
4990 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4992 if ((e->flags & EDGE_FAKE) == 0)
4993 break;
4996 if (e == NULL)
4997 goto epilogue_done;
4999 #ifdef HAVE_return
5000 if (optimize && HAVE_return)
5002 /* If we're allowed to generate a simple return instruction,
5003 then by definition we don't need a full epilogue. Examine
5004 the block that falls through to EXIT. If it does not
5005 contain any code, examine its predecessors and try to
5006 emit (conditional) return instructions. */
5008 basic_block last;
5009 rtx label;
5011 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5013 if (e->flags & EDGE_FALLTHRU)
5014 break;
5017 if (e == NULL)
5018 goto epilogue_done;
5019 last = e->src;
5021 /* Verify that there are no active instructions in the last block. */
5022 label = BB_END (last);
5023 while (label && !LABEL_P (label))
5025 if (active_insn_p (label))
5026 break;
5027 label = PREV_INSN (label);
5030 if (BB_HEAD (last) == label && LABEL_P (label))
5032 unsigned ix;
5033 rtx epilogue_line_note = NULL_RTX;
5035 /* Locate the line number associated with the closing brace,
5036 if we can find one. */
5037 for (seq = get_last_insn ();
5038 seq && ! active_insn_p (seq);
5039 seq = PREV_INSN (seq))
5040 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5042 epilogue_line_note = seq;
5043 break;
5046 for (ix = 0; VEC_iterate (edge, last->preds, ix, e); )
5048 basic_block bb = e->src;
5049 rtx jump;
5051 if (bb == ENTRY_BLOCK_PTR)
5053 ix++;
5054 continue;
5057 jump = BB_END (bb);
5058 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5060 ix++;
5061 continue;
5064 /* If we have an unconditional jump, we can replace that
5065 with a simple return instruction. */
5066 if (simplejump_p (jump))
5068 emit_return_into_block (bb, epilogue_line_note);
5069 delete_insn (jump);
5072 /* If we have a conditional jump, we can try to replace
5073 that with a conditional return instruction. */
5074 else if (condjump_p (jump))
5076 if (! redirect_jump (jump, 0, 0))
5078 ix++;
5079 continue;
5082 /* If this block has only one successor, it both jumps
5083 and falls through to the fallthru block, so we can't
5084 delete the edge. */
5085 if (EDGE_COUNT (bb->succs) == 1)
5087 ix++;
5088 continue;
5091 else
5093 ix++;
5094 continue;
5097 /* Fix up the CFG for the successful change we just made. */
5098 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5101 /* Emit a return insn for the exit fallthru block. Whether
5102 this is still reachable will be determined later. */
5104 emit_barrier_after (BB_END (last));
5105 emit_return_into_block (last, epilogue_line_note);
5106 epilogue_end = BB_END (last);
5107 EDGE_SUCC (last, 0)->flags &= ~EDGE_FALLTHRU;
5108 goto epilogue_done;
5111 #endif
5112 /* Find the edge that falls through to EXIT. Other edges may exist
5113 due to RETURN instructions, but those don't need epilogues.
5114 There really shouldn't be a mixture -- either all should have
5115 been converted or none, however... */
5117 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5119 if (e->flags & EDGE_FALLTHRU)
5120 break;
5123 if (e == NULL)
5124 goto epilogue_done;
5126 #ifdef HAVE_epilogue
5127 if (HAVE_epilogue)
5129 start_sequence ();
5130 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5132 seq = gen_epilogue ();
5134 #ifdef INCOMING_RETURN_ADDR_RTX
5135 /* If this function returns with the stack depressed and we can support
5136 it, massage the epilogue to actually do that. */
5137 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5138 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5139 seq = keep_stack_depressed (seq);
5140 #endif
5142 emit_jump_insn (seq);
5144 /* Retain a map of the epilogue insns. */
5145 record_insns (seq, &epilogue);
5146 set_insn_locators (seq, epilogue_locator);
5148 seq = get_insns ();
5149 end_sequence ();
5151 insert_insn_on_edge (seq, e);
5152 inserted = 1;
5154 else
5155 #endif
5157 basic_block cur_bb;
5159 if (! next_active_insn (BB_END (e->src)))
5160 goto epilogue_done;
5161 /* We have a fall-through edge to the exit block, the source is not
5162 at the end of the function, and there will be an assembler epilogue
5163 at the end of the function.
5164 We can't use force_nonfallthru here, because that would try to
5165 use return. Inserting a jump 'by hand' is extremely messy, so
5166 we take advantage of cfg_layout_finalize using
5167 fixup_fallthru_exit_predecessor. */
5168 cfg_layout_initialize (0);
5169 FOR_EACH_BB (cur_bb)
5170 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5171 cur_bb->rbi->next = cur_bb->next_bb;
5172 cfg_layout_finalize ();
5174 epilogue_done:
5176 if (inserted)
5177 commit_edge_insertions ();
5179 #ifdef HAVE_sibcall_epilogue
5180 /* Emit sibling epilogues before any sibling call sites. */
5182 for (ix = 0; VEC_iterate (edge, EXIT_BLOCK_PTR->preds, ix, e); )
5184 basic_block bb = e->src;
5185 rtx insn = BB_END (bb);
5186 rtx i;
5187 rtx newinsn;
5189 if (!CALL_P (insn)
5190 || ! SIBLING_CALL_P (insn))
5192 ix++;
5193 continue;
5196 start_sequence ();
5197 emit_insn (gen_sibcall_epilogue ());
5198 seq = get_insns ();
5199 end_sequence ();
5201 /* Retain a map of the epilogue insns. Used in life analysis to
5202 avoid getting rid of sibcall epilogue insns. Do this before we
5203 actually emit the sequence. */
5204 record_insns (seq, &sibcall_epilogue);
5205 set_insn_locators (seq, epilogue_locator);
5207 i = PREV_INSN (insn);
5208 newinsn = emit_insn_before (seq, insn);
5210 ix++;
5212 #endif
5214 #ifdef HAVE_prologue
5215 /* This is probably all useless now that we use locators. */
5216 if (prologue_end)
5218 rtx insn, prev;
5220 /* GDB handles `break f' by setting a breakpoint on the first
5221 line note after the prologue. Which means (1) that if
5222 there are line number notes before where we inserted the
5223 prologue we should move them, and (2) we should generate a
5224 note before the end of the first basic block, if there isn't
5225 one already there.
5227 ??? This behavior is completely broken when dealing with
5228 multiple entry functions. We simply always place the note
5229 into the first basic block and let alternate entry points
5230 be missed. */
5233 for (insn = prologue_end; insn; insn = prev)
5235 prev = PREV_INSN (insn);
5236 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5238 /* Note that we cannot reorder the first insn in the
5239 chain, since rest_of_compilation relies on that
5240 remaining constant. */
5241 if (prev == NULL)
5242 break;
5243 reorder_insns (insn, insn, prologue_end);
5247 /* Find the last line number note in the first block. */
5248 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5249 insn != prologue_end && insn;
5250 insn = PREV_INSN (insn))
5251 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5252 break;
5254 /* If we didn't find one, make a copy of the first line number
5255 we run across. */
5256 if (! insn)
5258 for (insn = next_active_insn (prologue_end);
5259 insn;
5260 insn = PREV_INSN (insn))
5261 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5263 emit_note_copy_after (insn, prologue_end);
5264 break;
5268 #endif
5269 #ifdef HAVE_epilogue
5270 if (epilogue_end)
5272 rtx insn, next;
5274 /* Similarly, move any line notes that appear after the epilogue.
5275 There is no need, however, to be quite so anal about the existence
5276 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5277 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5278 info generation. */
5279 for (insn = epilogue_end; insn; insn = next)
5281 next = NEXT_INSN (insn);
5282 if (NOTE_P (insn)
5283 && (NOTE_LINE_NUMBER (insn) > 0
5284 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5285 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5286 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5289 #endif
5292 /* Reposition the prologue-end and epilogue-begin notes after instruction
5293 scheduling and delayed branch scheduling. */
5295 void
5296 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5298 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5299 rtx insn, last, note;
5300 int len;
5302 if ((len = VARRAY_SIZE (prologue)) > 0)
5304 last = 0, note = 0;
5306 /* Scan from the beginning until we reach the last prologue insn.
5307 We apparently can't depend on basic_block_{head,end} after
5308 reorg has run. */
5309 for (insn = f; insn; insn = NEXT_INSN (insn))
5311 if (NOTE_P (insn))
5313 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5314 note = insn;
5316 else if (contains (insn, prologue))
5318 last = insn;
5319 if (--len == 0)
5320 break;
5324 if (last)
5326 /* Find the prologue-end note if we haven't already, and
5327 move it to just after the last prologue insn. */
5328 if (note == 0)
5330 for (note = last; (note = NEXT_INSN (note));)
5331 if (NOTE_P (note)
5332 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5333 break;
5336 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5337 if (LABEL_P (last))
5338 last = NEXT_INSN (last);
5339 reorder_insns (note, note, last);
5343 if ((len = VARRAY_SIZE (epilogue)) > 0)
5345 last = 0, note = 0;
5347 /* Scan from the end until we reach the first epilogue insn.
5348 We apparently can't depend on basic_block_{head,end} after
5349 reorg has run. */
5350 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5352 if (NOTE_P (insn))
5354 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5355 note = insn;
5357 else if (contains (insn, epilogue))
5359 last = insn;
5360 if (--len == 0)
5361 break;
5365 if (last)
5367 /* Find the epilogue-begin note if we haven't already, and
5368 move it to just before the first epilogue insn. */
5369 if (note == 0)
5371 for (note = insn; (note = PREV_INSN (note));)
5372 if (NOTE_P (note)
5373 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5374 break;
5377 if (PREV_INSN (last) != note)
5378 reorder_insns (note, note, PREV_INSN (last));
5381 #endif /* HAVE_prologue or HAVE_epilogue */
5384 /* Called once, at initialization, to initialize function.c. */
5386 void
5387 init_function_once (void)
5389 VARRAY_INT_INIT (prologue, 0, "prologue");
5390 VARRAY_INT_INIT (epilogue, 0, "epilogue");
5391 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
5394 /* Resets the ib_boundaries_block array. */
5396 void
5397 reset_block_changes (void)
5399 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5400 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5403 /* Record the boundary for BLOCK. */
5404 void
5405 record_block_change (tree block)
5407 int i, n;
5408 tree last_block;
5410 if (!block)
5411 return;
5413 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5414 VARRAY_POP (cfun->ib_boundaries_block);
5415 n = get_max_uid ();
5416 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5417 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5419 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
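/* Editor's worked example: ib_boundaries_block maps INSN_UIDs to the
   lexical BLOCK in effect when each insn was emitted.  If entries exist
   for uids 0..9 with pending block A on top, then record_block_change (B)
   called when get_max_uid () == 15 fills entries 10..14 with A and pushes
   B as the new pending block for the insns emitted next.  */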
5422 /* Finishes the record of block boundaries. */
5423 void finalize_block_changes (void)
5425 record_block_change (DECL_INITIAL (current_function_decl));
5428 /* For INSN, store in *BLOCK the BLOCK it belongs to. */
5429 void
5430 check_block_change (rtx insn, tree *block)
5432 unsigned uid = INSN_UID (insn);
5434 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5435 return;
5437 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5440 /* Releases the ib_boundaries_block records. */
5441 void
5442 free_block_changes (void)
5444 cfun->ib_boundaries_block = NULL;
5447 /* Returns the name of the current function. */
5448 const char *
5449 current_function_name (void)
5451 return lang_hooks.decl_printable_name (cfun->decl, 2);
5454 #include "gt-function.h"