/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in GCC; in
   these cases, use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
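
/* For example, with ALIGN == 8: CEIL_ROUND (13, 8) is (13 + 7) & ~7 == 16,
   and FLOOR_ROUND (13, 8) is 13 & ~7 == 8.  For a negative frame offset,
   FLOOR_ROUND (-13, 8) is -16, i.e. the result moves away from zero, which
   is what a downward-growing frame requires.  */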
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* This variable holds a pointer to the function used to create
   target-specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);
/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
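
/* As an illustration, when expanding a statement expression such as

       v = ({ struct S tmp = foo (); tmp.field; });

   the aggregate returned by foo may live in a stack temporary, and that
   temporary must survive past the end of the inner statement so that
   `tmp.field' can still be read; preserve_temp_slots below arranges this
   by pretending the slot was allocated at the enclosing nesting level.  */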
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void instantiate_decls (tree, int);
static void instantiate_decls_1 (tree, int);
static void instantiate_decl (rtx, HOST_WIDE_INT, int);
static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
static int instantiate_virtual_regs_1 (rtx *, rtx, int);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, varray_type *);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
static int contains (rtx, varray_type);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void instantiate_virtual_regs_lossage (rtx);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context)
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;
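
  /* As a worked example, suppose PREFERRED_STACK_BOUNDARY is 128 bits
     (frame_alignment == 16) and STARTING_FRAME_OFFSET is 8: frame_off is 8
     and frame_phase is 8.  Rounding a downward-growing frame offset of -20
     for 16-byte alignment below yields FLOOR_ROUND (-20 - 8, 16) + 8
     == -32 + 8 == -24, and the slot's absolute offset
     -24 + STARTING_FRAME_OFFSET == -16 is indeed a multiple of 16.  */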
  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
                        (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase,
                       (unsigned HOST_WIDE_INT) alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
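
/* Illustrative uses of the ALIGN convention documented above (hypothetical
   calls, not taken from this file): a word-sized spill slot might be
   requested as

       assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   letting the mode choose the alignment, while a 32-byte BLKmode area
   aligned only to a byte boundary would be

       assign_stack_local (BLKmode, 32, -2);  */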
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  level++;
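
  /* The increment above shifts the level by one, presumably so that
     level -1, which preserve_temp_slots can produce by moving a slot
     past level 0, maps to index 0 of the varray.  */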

  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused and will abort.

   TYPE is the type that will be used for the stack slot.  */
rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
                            tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = avail_temp_slots; p; p = p->next)
    {
      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
          && objects_must_conflict_p (p->type, type)
          && (best_p == 0 || best_p->size > p->size
              || (best_p->size == p->size && best_p->align > p->align)))
        {
          if (p->align == align && p->size == size)
            {
              selected = p;
              cut_slot_from_list (selected, &avail_temp_slots);
              best_p = 0;
              break;
            }
          best_p = p;
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
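
/* For example (hypothetical call), a temporary to hold an SImode value
   for the rest of the current statement could be obtained with

       rtx t = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);

   with KEEP == 0, free_temp_slots may recycle the slot at the end of
   the statement.  */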
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */
rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      tree size_tree;
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* If we still haven't been able to get a size, see if the language
         can compute a maximum size.  */
      if (size == -1
          && (size_tree = lang_hooks.types.max_size (type)) != 0
          && host_integerp (size_tree, 1))
        size = tree_low_cst (size_tree, 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("%Jsize of variable %qD is too large", decl, decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless at high
     levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
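
/* For example, if the free list holds a BLKmode slot P with base_offset 0
   and full_size 16 and a BLKmode slot Q with base_offset 16 and full_size 8,
   then P and Q are adjacent (0 + 16 == 16), so Q is merged into P, leaving
   a single slot of full_size 24 that can satisfy a larger request later.  */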
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on the remaining operands.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
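
/* For example, if OLD is (plus (reg R) (const_int 8)) and NEW is
   (plus (reg R') (const_int 8)) and no slot is known by OLD, the matching
   constant operands cause a recursive call on R and R', so R' ends up
   recorded as an alias for whatever temp slot R addresses.  */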
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}
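
/* A typical usage pattern (an illustrative sketch, not code taken from
   this file) brackets the expansion of a construct with:

       push_temp_slots ();
       ... expand expressions, allocating temporaries ...
       preserve_temp_slots (result);  // if RESULT may live in a temp
       free_temp_slots ();
       pop_temp_slots ();

   so temporaries created inside are recycled on exit unless explicitly
   preserved to the outer level.  */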
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS \
    ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) \
    : 0) \
   + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
   + (STACK_POINTER_OFFSET))
#endif
#endif
/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (current_function_decl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      {
        instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
        if (INSN_DELETED_P (insn))
          continue;
        instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
                                      NULL_RTX, 0);

        /* Past this point all ASM statements should match.  Verify that
           to avoid failures later in the compilation process.  */
        if (asm_noperands (PATTERN (insn)) >= 0
            && ! check_asm_operands (PATTERN (insn)))
          instantiate_virtual_regs_lossage (insn);
      }

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (current_function_decl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
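
/* For example, a frame reference such as

       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   is rewritten here into the equivalent hard-register form

       (mem:SI (plus:SI (reg:SI frame-pointer)
                        (const_int 8 + STARTING_FRAME_OFFSET)))

   since var_offset is STARTING_FRAME_OFFSET (shown schematically; the
   actual constant is folded).  */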
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (tree fndecl, int valid_only)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
      HOST_WIDE_INT size_rtl;

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
         larger than the declared type size.  We must use the larger of
         the two sizes.  */
      size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
      size = MAX (size_rtl, size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let, int valid_only)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      instantiate_decl (DECL_RTL (t),
                        int_size_in_bytes (TREE_TYPE (t)),
                        valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is nonzero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (x == 0 || !MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only && size >= 0)
    {
      unsigned HOST_WIDE_INT decl_size = size;

      /* Now verify that the resulting address is valid for every integer or
         floating-point mode up to and including SIZE bytes long.  We do this
         since the object might be accessed in any mode and frame addresses
         are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    new = arg_pointer_rtx, offset = cfa_offset;
  else
    return 0;

  *poffset = offset;
  return new;
}
/* Called when instantiate_virtual_regs has failed to update the instruction.
   Usually this means that a non-matching instruction has been emitted;
   however, for asm statements the problem may be in the constraints.  */
static void
instantiate_virtual_regs_lossage (rtx insn)
{
  gcc_assert (asm_noperands (PATTERN (insn)) >= 0);
  error_for_asm (insn, "impossible constraint in %<asm%>");
  delete_insn (insn);
}
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if the
   replacement is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */
static int
instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;
  HOST_WIDE_INT offset = 0;
  rtx temp;
  rtx seq;
  int i, j;
  const char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  /* We may have detected and deleted invalid asm statements.  */
  if (object && INSN_P (object) && INSN_DELETED_P (object))
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
         the actual register should receive the source minus the
         appropriate offset.  This is used, for example, in the handling
         of non-local gotos.  */
      if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
        {
          rtx src = SET_SRC (x);

          /* We are setting the register, not using it, so the relevant
             offset is the negative of the offset to use were we using
             the register.  */
          offset = - offset;
          instantiate_virtual_regs_1 (&src, NULL_RTX, 0);

          /* The only valid sources here are PLUS or REG.  Just do
             the simplest possible thing to handle them.  */
          if (!REG_P (src) && GET_CODE (src) != PLUS)
            {
              instantiate_virtual_regs_lossage (object);
              return 1;
            }

          start_sequence ();
          if (!REG_P (src))
            temp = force_operand (src, NULL_RTX);
          else
            temp = src;
          temp = force_operand (plus_constant (temp, offset), NULL_RTX);
          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, object);
          SET_DEST (x) = new;

          if (! validate_change (object, &SET_SRC (x), temp, 0)
              || ! extra_insns)
            instantiate_virtual_regs_lossage (object);

          return 1;
        }

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
        {
          rtx old, new_offset;

          /* Check for (plus (plus VIRT foo) (const_int)) first.  */
          if (GET_CODE (XEXP (x, 0)) == PLUS)
            {
              if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
                {
                  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
                                              extra_insns);
                  new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
                }
              else
                {
                  loc = &XEXP (x, 0);
                  goto restart;
                }
            }

#ifdef POINTERS_EXTEND_UNSIGNED
          /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
             we can commute the PLUS and SUBREG because pointers into the
             frame are well-behaved.  */
          else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && 0 != (new
                            = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
                                                   &offset))
                   && validate_change (object, loc,
                                       plus_constant (gen_lowpart (ptr_mode,
                                                                   new),
                                                      offset
                                                      + INTVAL (XEXP (x, 1))),
                                       0))
            return 1;
#endif
          else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
            {
              /* We know the second operand is a constant.  Unless the
                 first operand is a REG (which has been already checked),
                 it needs to be checked.  */
              if (!REG_P (XEXP (x, 0)))
                {
                  loc = &XEXP (x, 0);
                  goto restart;
                }
              return 1;
            }

          new_offset = plus_constant (XEXP (x, 1), offset);

          /* If the new constant is zero, try to replace the sum with just
             the register.  */
          if (new_offset == const0_rtx
              && validate_change (object, loc, new, 0))
            return 1;

          /* Next try to replace the register and new offset.
             There are two changes to validate here and we can't assume that
             in the case of old offset equals new just changing the register
             will yield a valid insn.  In the interests of a little efficiency,
             however, we only call validate change once (we don't queue up the
             changes and then call apply_change_group).  */

          old = XEXP (x, 0);
          if (offset == 0
              ? ! validate_change (object, &XEXP (x, 0), new, 0)
              : (XEXP (x, 0) = new,
                 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
            {
              if (! extra_insns)
                {
                  XEXP (x, 0) = old;
                  return 0;
                }

              /* Otherwise copy the new constant into a register and replace
                 constant with that register.  */
              temp = gen_reg_rtx (Pmode);
              XEXP (x, 0) = new;
              if (validate_change (object, &XEXP (x, 1), temp, 0))
                emit_insn_before (gen_move_insn (temp, new_offset), object);
              else
                {
                  /* If that didn't work, replace this expression with a
                     register containing the sum.  */

                  XEXP (x, 0) = old;
                  new = gen_rtx_PLUS (Pmode, new, new_offset);

                  start_sequence ();
                  temp = force_operand (new, NULL_RTX);
                  seq = get_insns ();
                  end_sequence ();

                  emit_insn_before (seq, object);
                  if (! validate_change (object, loc, temp, 0)
                      && ! validate_replace_rtx (x, temp, object))
                    {
                      instantiate_virtual_regs_lossage (object);
                      return 1;
                    }
                }
            }

          return 1;
        }

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
        instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
         handled by our scan of decls.  The only special handling we
         need here is to make a copy of the rtx to ensure it isn't being
         shared if we have to change it to a pseudo.

         If the rtx is a simple reference to an address via a virtual register,
         it can potentially be shared.  In such cases, first try to make it
         a valid address, which can also be shared.  Otherwise, copy it and
         proceed normally.

         First check for common cases that need no processing.  These are
         usually due to instantiation already being done on a previous instance
         of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          || temp == arg_pointer_rtx
#endif
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          || temp == hard_frame_pointer_rtx
#endif
          || temp == frame_pointer_rtx)
        return 1;

      if (GET_CODE (temp) == PLUS
          && CONSTANT_ADDRESS_P (XEXP (temp, 1))
          && (XEXP (temp, 0) == frame_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
              || XEXP (temp, 0) == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || XEXP (temp, 0) == arg_pointer_rtx
#endif
              ))
        return 1;

      if (temp == virtual_stack_vars_rtx
          || temp == virtual_incoming_args_rtx
          || (GET_CODE (temp) == PLUS
              && CONSTANT_ADDRESS_P (XEXP (temp, 1))
              && (XEXP (temp, 0) == virtual_stack_vars_rtx
                  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
        {
          /* This MEM may be shared.  If the substitution can be done without
             the need to generate new pseudos, we want to do it in place
             so all copies of the shared rtx benefit.  The call below will
             only make substitutions if the resulting address is still
             valid.

             Note that we cannot pass X as the object in the recursive call
             since the insn being processed may not allow all valid
             addresses.  However, if we were not passed an object, we can
             only modify X without copying it if X will have a valid
             address.

             ??? Also note that this can still lose if OBJECT is an insn that
             has fewer restrictions on an address than some other insn.
             In that case, we will modify the shared address.  This case
             doesn't seem very likely, though.  One case where this could
             happen is in the case of a USE or CLOBBER reference, but we
             take care of that below.  */

          if (instantiate_virtual_regs_1 (&XEXP (x, 0),
                                          object ? object : x, 0))
            return 1;

          /* Otherwise make a copy and process that copy.  We copy the entire
             RTL expression since it might be a PLUS which could also be
             shared.  */
          *loc = x = copy_rtx (x);
        }

      /* Fall through to generic unary operation case.  */
    case PREFETCH:
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:          case CTZ:
    case POPCOUNT:     case PARITY:
      /* These cases either have just one operand or we know that we need not
         check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case USE:
    case CLOBBER:
      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
         go ahead and make the invalid one, but do it to a copy.  For a REG,
         just make the recursive call, since there's no chance of a problem.  */

      if ((MEM_P (XEXP (x, 0))
           && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
                                          0))
          || (REG_P (XEXP (x, 0))
              && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
        return 1;

      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
         in front of this insn and substitute the temporary.  */
      if ((new = instantiate_new_reg (x, &offset)) != 0)
        {
          temp = plus_constant (new, offset);
          if (!validate_change (object, loc, temp, 0))
            {
              if (! extra_insns)
                return 0;

              start_sequence ();
              temp = force_operand (temp, NULL_RTX);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, object);
              if (! validate_change (object, loc, temp, 0)
                  && ! validate_replace_rtx (x, temp, object))
                instantiate_virtual_regs_lossage (object);
            }
        }

      return 1;

    default:
      break;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
        if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
          return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
                                          extra_insns))
          return 0;

  return 1;
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */
1832 int
1833 aggregate_value_p (tree exp, tree fntype)
1834 {
1835 int i, regno, nregs;
1836 rtx reg;
1838 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1840 if (fntype)
1841 switch (TREE_CODE (fntype))
1842 {
1843 case CALL_EXPR:
1844 fntype = get_callee_fndecl (fntype);
1845 fntype = fntype ? TREE_TYPE (fntype) : 0;
1846 break;
1847 case FUNCTION_DECL:
1848 fntype = TREE_TYPE (fntype);
1849 break;
1850 case FUNCTION_TYPE:
1851 case METHOD_TYPE:
1852 break;
1853 case IDENTIFIER_NODE:
1854 fntype = 0;
1855 break;
1856 default:
1857 /* We don't expect other tree codes here. */
1858 gcc_unreachable ();
1859 }
1861 if (TREE_CODE (type) == VOID_TYPE)
1862 return 0;
1863 /* If the front end has decided that this needs to be passed by
1864 reference, do so. */
1865 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1866 && DECL_BY_REFERENCE (exp))
1867 return 1;
1868 if (targetm.calls.return_in_memory (type, fntype))
1869 return 1;
1870 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1871 and thus can't be returned in registers. */
1872 if (TREE_ADDRESSABLE (type))
1873 return 1;
1874 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1875 return 1;
1876 /* Make sure we have suitable call-clobbered regs to return
1877 the value in; if not, we must return it in memory. */
1878 reg = hard_function_value (type, 0, 0);
1880 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1881 it is OK. */
1882 if (!REG_P (reg))
1883 return 0;
1885 regno = REGNO (reg);
1886 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1887 for (i = 0; i < nregs; i++)
1888 if (! call_used_regs[regno + i])
1889 return 1;
1890 return 0;
1891 }
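/* Illustrative sketch only, not taken from the surrounding sources: a
   caller deciding on a return convention might use this predicate
   roughly as follows, given some FUNCTION_DECL fndecl:

     tree restype = TREE_TYPE (DECL_RESULT (fndecl));
     if (aggregate_value_p (restype, fndecl))
       ... arrange for a hidden return-slot address argument ...
     else
       ... expect the value back in call-clobbered registers ...

   assign_parms_augmented_arg_list below performs exactly this kind of
   check for the current function. */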
1893 /* Return true if we should assign DECL a pseudo register; false if it
1894 should live on the local stack. */
1896 bool
1897 use_register_for_decl (tree decl)
1898 {
1899 /* Honor volatile. */
1900 if (TREE_SIDE_EFFECTS (decl))
1901 return false;
1903 /* Honor addressability. */
1904 if (TREE_ADDRESSABLE (decl))
1905 return false;
1907 /* Only register-like things go in registers. */
1908 if (DECL_MODE (decl) == BLKmode)
1909 return false;
1911 /* If -ffloat-store specified, don't put explicit float variables
1912 into registers. */
1913 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1914 propagates values across these stores, and it probably shouldn't. */
1915 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1916 return false;
1918 /* Compiler-generated temporaries can always go in registers. */
1919 if (DECL_ARTIFICIAL (decl))
1920 return true;
1922 #ifdef NON_SAVING_SETJMP
1923 /* Protect variables not declared "register" from setjmp. */
1924 if (NON_SAVING_SETJMP
1925 && current_function_calls_setjmp
1926 && !DECL_REGISTER (decl))
1927 return false;
1928 #endif
1930 return (optimize || DECL_REGISTER (decl));
1931 }
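/* For illustration, assuming an optimizing compilation, the tests
   above classify these hypothetical locals roughly as follows:

     volatile int a;        TREE_SIDE_EFFECTS -> stack
     int b; ... &b ...      TREE_ADDRESSABLE  -> stack
     struct s c;            BLKmode           -> stack
     int d;                 scalar, unaliased -> pseudo register

   At -O0 only DECL_REGISTER and DECL_ARTIFICIAL decls end up in
   registers, per the final return statement. */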
1933 /* Return true if TYPE should be passed by invisible reference. */
1935 bool
1936 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1937 tree type, bool named_arg)
1938 {
1939 if (type)
1940 {
1941 /* If this type contains non-trivial constructors, then it is
1942 forbidden for the middle-end to create any new copies. */
1943 if (TREE_ADDRESSABLE (type))
1944 return true;
1946 /* GCC post 3.4 passes *all* variable sized types by reference. */
1947 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1948 return true;
1949 }
1951 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1952 }
1954 /* Return true if TYPE, which is passed by reference, should be callee
1955 copied instead of caller copied. */
1957 bool
1958 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1959 tree type, bool named_arg)
1960 {
1961 if (type && TREE_ADDRESSABLE (type))
1962 return false;
1963 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1964 }
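/* A minimal sketch (the caller and CUM are assumed, not taken from
   this file) of how the two predicates combine at a call site:

     if (pass_by_reference (&cum, TYPE_MODE (type), type, named))
       {
         if (reference_callee_copied (&cum, TYPE_MODE (type), type, named))
           ... pass the original object's address; the callee copies ...
         else
           ... copy into a temporary and pass the temporary's address ...
       }

   TREE_ADDRESSABLE types are never callee-copied, since the middle-end
   may not create new copies of them. */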
1966 /* Structures to communicate between the subroutines of assign_parms.
1967 The first holds data persistent across all parameters, the second
1968 is cleared out for each parameter. */
1970 struct assign_parm_data_all
1971 {
1972 CUMULATIVE_ARGS args_so_far;
1973 struct args_size stack_args_size;
1974 tree function_result_decl;
1975 tree orig_fnargs;
1976 rtx conversion_insns;
1977 HOST_WIDE_INT pretend_args_size;
1978 HOST_WIDE_INT extra_pretend_bytes;
1979 int reg_parm_stack_space;
1980 };
1982 struct assign_parm_data_one
1983 {
1984 tree nominal_type;
1985 tree passed_type;
1986 rtx entry_parm;
1987 rtx stack_parm;
1988 enum machine_mode nominal_mode;
1989 enum machine_mode passed_mode;
1990 enum machine_mode promoted_mode;
1991 struct locate_and_pad_arg_data locate;
1992 int partial;
1993 BOOL_BITFIELD named_arg : 1;
1994 BOOL_BITFIELD last_named : 1;
1995 BOOL_BITFIELD passed_pointer : 1;
1996 BOOL_BITFIELD on_stack : 1;
1997 BOOL_BITFIELD loaded_in_reg : 1;
1998 };
2000 /* A subroutine of assign_parms. Initialize ALL. */
2002 static void
2003 assign_parms_initialize_all (struct assign_parm_data_all *all)
2004 {
2005 tree fntype;
2007 memset (all, 0, sizeof (*all));
2009 fntype = TREE_TYPE (current_function_decl);
2011 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2012 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2013 #else
2014 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2015 current_function_decl, -1);
2016 #endif
2018 #ifdef REG_PARM_STACK_SPACE
2019 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2020 #endif
2021 }
2023 /* If ARGS contains entries with complex types, split the entry into two
2024 entries of the component type. Return a new list if substitutions are
2025 needed, else the old list. */
2027 static tree
2028 split_complex_args (tree args)
2029 {
2030 tree p;
2032 /* Before allocating memory, check for the common case of no complex. */
2033 for (p = args; p; p = TREE_CHAIN (p))
2035 tree type = TREE_TYPE (p);
2036 if (TREE_CODE (type) == COMPLEX_TYPE
2037 && targetm.calls.split_complex_arg (type))
2038 goto found;
2040 return args;
2042 found:
2043 args = copy_list (args);
2045 for (p = args; p; p = TREE_CHAIN (p))
2047 tree type = TREE_TYPE (p);
2048 if (TREE_CODE (type) == COMPLEX_TYPE
2049 && targetm.calls.split_complex_arg (type))
2051 tree decl;
2052 tree subtype = TREE_TYPE (type);
2054 /* Rewrite the PARM_DECL's type with its component. */
2055 TREE_TYPE (p) = subtype;
2056 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2057 DECL_MODE (p) = VOIDmode;
2058 DECL_SIZE (p) = NULL;
2059 DECL_SIZE_UNIT (p) = NULL;
2060 layout_decl (p, 0);
2062 /* Build a second synthetic decl. */
2063 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2064 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2065 layout_decl (decl, 0);
2067 /* Splice it in; skip the new decl. */
2068 TREE_CHAIN (decl) = TREE_CHAIN (p);
2069 TREE_CHAIN (p) = decl;
2070 p = decl;
2074 return args;
2075 }
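/* For example, assuming the target's split_complex_arg hook accepts
   the complex type, a parameter list equivalent to

     void f (_Complex double x, int y);

   is rewritten by the loop above as if it had been

     void f (double x_re, double x_im, int y);

   where the second double is the synthetic PARM_DECL built here (the
   names are illustrative; the new decl is anonymous). */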
2077 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2078 the hidden struct return argument, and (abi willing) complex args.
2079 Return the new parameter list. */
2081 static tree
2082 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2083 {
2084 tree fndecl = current_function_decl;
2085 tree fntype = TREE_TYPE (fndecl);
2086 tree fnargs = DECL_ARGUMENTS (fndecl);
2088 /* If struct value address is treated as the first argument, make it so. */
2089 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2090 && ! current_function_returns_pcc_struct
2091 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2092 {
2093 tree type = build_pointer_type (TREE_TYPE (fntype));
2094 tree decl;
2096 decl = build_decl (PARM_DECL, NULL_TREE, type);
2097 DECL_ARG_TYPE (decl) = type;
2098 DECL_ARTIFICIAL (decl) = 1;
2100 TREE_CHAIN (decl) = fnargs;
2101 fnargs = decl;
2102 all->function_result_decl = decl;
2103 }
2105 all->orig_fnargs = fnargs;
2107 /* If the target wants to split complex arguments into scalars, do so. */
2108 if (targetm.calls.split_complex_arg)
2109 fnargs = split_complex_args (fnargs);
2111 return fnargs;
2112 }
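/* Conceptually (a sketch; the exact convention belongs to the target),
   a function whose result must be returned in memory, e.g.

     struct big f (int n);

   is set up here as if it had been declared

     void f (struct big *<retval>, int n);

   with the artificial PARM_DECL built above standing in for the
   hidden <retval> pointer. */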
2114 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2115 data for the parameter. Incorporate ABI specifics such as pass-by-
2116 reference and type promotion. */
2118 static void
2119 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2120 struct assign_parm_data_one *data)
2121 {
2122 tree nominal_type, passed_type;
2123 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2125 memset (data, 0, sizeof (*data));
2127 /* Set LAST_NAMED if this is the last named arg before the anonymous args. */
2128 if (current_function_stdarg)
2129 {
2130 tree tem;
2131 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
2132 if (DECL_NAME (tem))
2133 break;
2134 if (tem == 0)
2135 data->last_named = true;
2136 }
2138 /* Set NAMED_ARG if this arg should be treated as a named arg. For
2139 most machines, if this is a varargs/stdarg function, then we treat
2140 the last named arg as if it were anonymous too. */
2141 if (targetm.calls.strict_argument_naming (&all->args_so_far))
2142 data->named_arg = 1;
2143 else
2144 data->named_arg = !data->last_named;
2146 nominal_type = TREE_TYPE (parm);
2147 passed_type = DECL_ARG_TYPE (parm);
2149 /* Look out for errors propagating this far. Also, if the parameter's
2150 type is void then its value doesn't matter. */
2151 if (TREE_TYPE (parm) == error_mark_node
2152 /* This can happen after weird syntax errors
2153 or if an enum type is defined among the parms. */
2154 || TREE_CODE (parm) != PARM_DECL
2155 || passed_type == NULL
2156 || VOID_TYPE_P (nominal_type))
2157 {
2158 nominal_type = passed_type = void_type_node;
2159 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2160 goto egress;
2161 }
2163 /* Find mode of arg as it is passed, and mode of arg as it should be
2164 during execution of this function. */
2165 passed_mode = TYPE_MODE (passed_type);
2166 nominal_mode = TYPE_MODE (nominal_type);
2168 /* If the parm is to be passed as a transparent union, use the type of
2169 the first field for the tests below. We have already verified that
2170 the modes are the same. */
2171 if (DECL_TRANSPARENT_UNION (parm)
2172 || (TREE_CODE (passed_type) == UNION_TYPE
2173 && TYPE_TRANSPARENT_UNION (passed_type)))
2174 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2176 /* See if this arg was passed by invisible reference. */
2177 if (pass_by_reference (&all->args_so_far, passed_mode,
2178 passed_type, data->named_arg))
2180 passed_type = nominal_type = build_pointer_type (passed_type);
2181 data->passed_pointer = true;
2182 passed_mode = nominal_mode = Pmode;
2185 /* Find mode as it is passed by the ABI. */
2186 promoted_mode = passed_mode;
2187 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2189 int unsignedp = TYPE_UNSIGNED (passed_type);
2190 promoted_mode = promote_mode (passed_type, promoted_mode,
2191 &unsignedp, 1);
2194 egress:
2195 data->nominal_type = nominal_type;
2196 data->passed_type = passed_type;
2197 data->nominal_mode = nominal_mode;
2198 data->passed_mode = passed_mode;
2199 data->promoted_mode = promoted_mode;
2200 }
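/* A worked example, assuming a 32-bit target whose ABI promotes small
   integer arguments (promote_function_args true): for a parameter
   declared short,

     nominal_mode  = HImode    (mode for use inside the body)
     passed_mode   = TYPE_MODE (DECL_ARG_TYPE (parm))
     promoted_mode = SImode    (widened by promote_mode)

   whereas a pass-by-reference parameter instead has passed_mode and
   nominal_mode forced to Pmode, with passed_pointer set. */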
2202 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2204 static void
2205 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2206 struct assign_parm_data_one *data, bool no_rtl)
2208 int varargs_pretend_bytes = 0;
2210 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2211 data->promoted_mode,
2212 data->passed_type,
2213 &varargs_pretend_bytes, no_rtl);
2215 /* If the back-end has requested extra stack space, record how much is
2216 needed. Do not change pretend_args_size otherwise since it may be
2217 nonzero from an earlier partial argument. */
2218 if (varargs_pretend_bytes > 0)
2219 all->pretend_args_size = varargs_pretend_bytes;
2222 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2223 the incoming location of the current parameter. */
2225 static void
2226 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2227 struct assign_parm_data_one *data)
2229 HOST_WIDE_INT pretend_bytes = 0;
2230 rtx entry_parm;
2231 bool in_regs;
2233 if (data->promoted_mode == VOIDmode)
2235 data->entry_parm = data->stack_parm = const0_rtx;
2236 return;
2239 #ifdef FUNCTION_INCOMING_ARG
2240 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2241 data->passed_type, data->named_arg);
2242 #else
2243 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2244 data->passed_type, data->named_arg);
2245 #endif
2247 if (entry_parm == 0)
2248 data->promoted_mode = data->passed_mode;
2250 /* Determine parm's home in the stack, in case it arrives in the stack
2251 or we should pretend it did. Compute the stack position and rtx where
2252 the argument arrives and its size.
2254 There is one complexity here: If this was a parameter that would
2255 have been passed in registers, but wasn't only because it is
2256 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2257 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2258 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2259 as it was the previous time. */
2260 in_regs = entry_parm != 0;
2261 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2262 in_regs = true;
2263 #endif
2264 if (!in_regs && !data->named_arg)
2266 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2268 rtx tem;
2269 #ifdef FUNCTION_INCOMING_ARG
2270 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2271 data->passed_type, true);
2272 #else
2273 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2274 data->passed_type, true);
2275 #endif
2276 in_regs = tem != NULL;
2280 /* If this parameter was passed both in registers and in the stack, use
2281 the copy on the stack. */
2282 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2283 data->passed_type))
2284 entry_parm = 0;
2286 if (entry_parm)
2288 int partial;
2290 partial = FUNCTION_ARG_PARTIAL_NREGS (all->args_so_far,
2291 data->promoted_mode,
2292 data->passed_type,
2293 data->named_arg);
2294 data->partial = partial;
2296 /* The caller might already have allocated stack space for the
2297 register parameters. */
2298 if (partial != 0 && all->reg_parm_stack_space == 0)
2300 /* Part of this argument is passed in registers and part
2301 is passed on the stack. Ask the prologue code to extend
2302 the stack part so that we can recreate the full value.
2304 PRETEND_BYTES is the size of the registers we need to store.
2305 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2306 stack space that the prologue should allocate.
2308 Internally, gcc assumes that the argument pointer is aligned
2309 to STACK_BOUNDARY bits. This is used both for alignment
2310 optimizations (see init_emit) and to locate arguments that are
2311 aligned to more than PARM_BOUNDARY bits. We must preserve this
2312 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2313 a stack boundary. */
2315 /* We assume at most one partial arg, and it must be the first
2316 argument on the stack. */
2317 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2319 pretend_bytes = partial * UNITS_PER_WORD;
2320 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2322 /* We want to align relative to the actual stack pointer, so
2323 don't include this in the stack size until later. */
2324 all->extra_pretend_bytes = all->pretend_args_size;
2328 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2329 entry_parm ? data->partial : 0, current_function_decl,
2330 &all->stack_args_size, &data->locate);
2332 /* Adjust offsets to include the pretend args. */
2333 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2334 data->locate.slot_offset.constant += pretend_bytes;
2335 data->locate.offset.constant += pretend_bytes;
2337 data->entry_parm = entry_parm;
2338 }
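/* A worked example of the pretend-args accounting above, assuming
   UNITS_PER_WORD == 4, STACK_BYTES == 16, and partial == 1:

     pretend_bytes          = 1 * 4 = 4
     all->pretend_args_size = CEIL_ROUND (4, 16) = (4 + 15) & ~15 = 16

   The prologue thus extends the stack by a full 16 bytes to keep the
   argument pointer aligned to STACK_BOUNDARY; the 12-byte difference
   is folded back into slot_offset and offset just above. */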
2340 /* A subroutine of assign_parms. If there is actually space on the stack
2341 for this parm, count it in stack_args_size and return true. */
2343 static bool
2344 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2345 struct assign_parm_data_one *data)
2346 {
2347 /* Trivially true if we've no incoming register. */
2348 if (data->entry_parm == NULL)
2349 ;
2350 /* Also true if we're partially in registers and partially not,
2351 since we've arranged to drop the entire argument on the stack. */
2352 else if (data->partial != 0)
2353 ;
2354 /* Also true if the target says that it's passed in both registers
2355 and on the stack. */
2356 else if (GET_CODE (data->entry_parm) == PARALLEL
2357 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2358 ;
2359 /* Also true if the target says that there's stack allocated for
2360 all register parameters. */
2361 else if (all->reg_parm_stack_space > 0)
2362 ;
2363 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2364 else
2365 return false;
2367 all->stack_args_size.constant += data->locate.size.constant;
2368 if (data->locate.size.var)
2369 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2371 return true;
2372 }
2374 /* A subroutine of assign_parms. Given that this parameter is allocated
2375 stack space by the ABI, find it. */
2377 static void
2378 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2380 rtx offset_rtx, stack_parm;
2381 unsigned int align, boundary;
2383 /* If we're passing this arg using a reg, make its stack home the
2384 aligned stack slot. */
2385 if (data->entry_parm)
2386 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2387 else
2388 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2390 stack_parm = current_function_internal_arg_pointer;
2391 if (offset_rtx != const0_rtx)
2392 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2393 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2395 set_mem_attributes (stack_parm, parm, 1);
2397 boundary = FUNCTION_ARG_BOUNDARY (data->promoted_mode, data->passed_type);
2398 align = 0;
2400 /* If we're padding upward, we know that the alignment of the slot
2401 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2402 intentionally forcing upward padding. Otherwise we have to come
2403 up with a guess at the alignment based on OFFSET_RTX. */
2404 if (data->locate.where_pad == upward || data->entry_parm)
2405 align = boundary;
2406 else if (GET_CODE (offset_rtx) == CONST_INT)
2407 {
2408 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2409 align = align & -align;
2410 }
2411 if (align > 0)
2412 set_mem_align (stack_parm, align);
2414 if (data->entry_parm)
2415 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2417 data->stack_parm = stack_parm;
2418 }
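/* A worked example of the alignment guess above: with boundary == 64
   bits and offset_rtx == GEN_INT (4),

     align = 4 * BITS_PER_UNIT | 64 = 32 | 64 = 96
     align = 96 & -96 = 32

   i.e. the slot is credited only with the largest power of two that
   divides both the offset (in bits) and the declared boundary. */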
2420 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2421 always valid and contiguous. */
2423 static void
2424 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2426 rtx entry_parm = data->entry_parm;
2427 rtx stack_parm = data->stack_parm;
2429 /* If this parm was passed part in regs and part in memory, pretend it
2430 arrived entirely in memory by pushing the register-part onto the stack.
2431 In the special case of a DImode or DFmode that is split, we could put
2432 it together in a pseudoreg directly, but for now that's not worth
2433 bothering with. */
2434 if (data->partial != 0)
2436 /* Handle calls that pass values in multiple non-contiguous
2437 locations. The Irix 6 ABI has examples of this. */
2438 if (GET_CODE (entry_parm) == PARALLEL)
2439 emit_group_store (validize_mem (stack_parm), entry_parm,
2440 data->passed_type,
2441 int_size_in_bytes (data->passed_type));
2442 else
2443 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2444 data->partial);
2446 entry_parm = stack_parm;
2449 /* If we didn't decide this parm came in a register, by default it came
2450 on the stack. */
2451 else if (entry_parm == NULL)
2452 entry_parm = stack_parm;
2454 /* When an argument is passed in multiple locations, we can't make use
2455 of this information, but we can save some copying if the whole argument
2456 is passed in a single register. */
2457 else if (GET_CODE (entry_parm) == PARALLEL
2458 && data->nominal_mode != BLKmode
2459 && data->passed_mode != BLKmode)
2461 size_t i, len = XVECLEN (entry_parm, 0);
2463 for (i = 0; i < len; i++)
2464 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2465 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2466 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2467 == data->passed_mode)
2468 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2470 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2471 break;
2475 data->entry_parm = entry_parm;
2478 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2479 always valid and properly aligned. */
2482 static void
2483 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2485 rtx stack_parm = data->stack_parm;
2487 /* If we can't trust the parm stack slot to be aligned enough for its
2488 ultimate type, don't use that slot after entry. We'll make another
2489 stack slot, if we need one. */
2490 if (STRICT_ALIGNMENT && stack_parm
2491 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2492 stack_parm = NULL;
2494 /* If parm was passed in memory, and we need to convert it on entry,
2495 don't store it back in that same slot. */
2496 else if (data->entry_parm == stack_parm
2497 && data->nominal_mode != BLKmode
2498 && data->nominal_mode != data->passed_mode)
2499 stack_parm = NULL;
2501 data->stack_parm = stack_parm;
2504 /* A subroutine of assign_parms. Return true if the current parameter
2505 should be stored as a BLKmode in the current frame. */
2507 static bool
2508 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2510 if (data->nominal_mode == BLKmode)
2511 return true;
2512 if (GET_CODE (data->entry_parm) == PARALLEL)
2513 return true;
2515 #ifdef BLOCK_REG_PADDING
2516 if (data->locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
2517 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD)
2518 return true;
2519 #endif
2521 return false;
2524 /* A subroutine of assign_parms. Arrange for the parameter to be
2525 present and valid in DATA->STACK_RTL. */
2527 static void
2528 assign_parm_setup_block (tree parm, struct assign_parm_data_one *data)
2530 rtx entry_parm = data->entry_parm;
2531 rtx stack_parm = data->stack_parm;
2533 /* If we've a non-block object that's nevertheless passed in parts,
2534 reconstitute it in register operations rather than on the stack. */
2535 if (GET_CODE (entry_parm) == PARALLEL
2536 && data->nominal_mode != BLKmode
2537 && XVECLEN (entry_parm, 0) > 1
2538 && use_register_for_decl (parm))
2540 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2542 emit_group_store (parmreg, entry_parm, data->nominal_type,
2543 int_size_in_bytes (data->nominal_type));
2544 SET_DECL_RTL (parm, parmreg);
2545 return;
2548 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2549 calls that pass values in multiple non-contiguous locations. */
2550 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2552 HOST_WIDE_INT size = int_size_in_bytes (data->passed_type);
2553 HOST_WIDE_INT size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2554 rtx mem;
2556 /* Note that we will be storing an integral number of words.
2557 So we have to be careful to ensure that we allocate an
2558 integral number of words. We do this below in the
2559 assign_stack_local if space was not allocated in the argument
2560 list. If it was, this will not work if PARM_BOUNDARY is not
2561 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2562 if it becomes a problem. Exception is when BLKmode arrives
2563 with arguments not conforming to word_mode. */
2565 if (stack_parm == 0)
2567 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
2568 data->stack_parm = stack_parm;
2569 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2570 set_mem_attributes (stack_parm, parm, 1);
2572 else if (GET_CODE (entry_parm) == PARALLEL)
2573 ;
2574 else
2575 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2577 mem = validize_mem (stack_parm);
2579 /* Handle values in multiple non-contiguous locations. */
2580 if (GET_CODE (entry_parm) == PARALLEL)
2581 emit_group_store (mem, entry_parm, data->passed_type, size);
2583 else if (size == 0)
2584 ;
2586 /* If SIZE is that of a mode no bigger than a word, just use
2587 that mode's store operation. */
2588 else if (size <= UNITS_PER_WORD)
2590 enum machine_mode mode
2591 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2593 if (mode != BLKmode
2594 #ifdef BLOCK_REG_PADDING
2595 && (size == UNITS_PER_WORD
2596 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2597 != (BYTES_BIG_ENDIAN ? upward : downward)))
2598 #endif
2601 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2602 emit_move_insn (change_address (mem, mode, 0), reg);
2605 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2606 machine must be aligned to the left before storing
2607 to memory. Note that the previous test doesn't
2608 handle all cases (e.g. SIZE == 3). */
2609 else if (size != UNITS_PER_WORD
2610 #ifdef BLOCK_REG_PADDING
2611 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2612 == downward)
2613 #else
2614 && BYTES_BIG_ENDIAN
2615 #endif
2618 rtx tem, x;
2619 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2620 rtx reg = gen_rtx_REG (word_mode, REGNO (data->entry_parm));
2622 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2623 build_int_cst (NULL_TREE, by),
2624 NULL_RTX, 1);
2625 tem = change_address (mem, word_mode, 0);
2626 emit_move_insn (tem, x);
2628 else
2629 move_block_from_reg (REGNO (data->entry_parm), mem,
2630 size_stored / UNITS_PER_WORD);
2632 else
2633 move_block_from_reg (REGNO (data->entry_parm), mem,
2634 size_stored / UNITS_PER_WORD);
2637 SET_DECL_RTL (parm, stack_parm);
2640 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2641 parameter. Get it there. Perform all ABI specified conversions. */
2643 static void
2644 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2645 struct assign_parm_data_one *data)
2647 rtx parmreg;
2648 enum machine_mode promoted_nominal_mode;
2649 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2650 bool did_conversion = false;
2652 /* Store the parm in a pseudoregister during the function, but we may
2653 need to do it in a wider mode. */
2655 promoted_nominal_mode
2656 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2658 parmreg = gen_reg_rtx (promoted_nominal_mode);
2660 if (!DECL_ARTIFICIAL (parm))
2661 mark_user_reg (parmreg);
2663 /* If this was an item that we received a pointer to,
2664 set DECL_RTL appropriately. */
2665 if (data->passed_pointer)
2667 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2668 set_mem_attributes (x, parm, 1);
2669 SET_DECL_RTL (parm, x);
2671 else
2672 SET_DECL_RTL (parm, parmreg);
2674 /* Copy the value into the register. */
2675 if (data->nominal_mode != data->passed_mode
2676 || promoted_nominal_mode != data->promoted_mode)
2678 int save_tree_used;
2680 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2681 mode, by the caller. We now have to convert it to
2682 NOMINAL_MODE, if different. However, PARMREG may be in
2683 a different mode than NOMINAL_MODE if it is being stored
2684 promoted.
2686 If ENTRY_PARM is a hard register, it might be in a register
2687 not valid for operating in its mode (e.g., an odd-numbered
2688 register for a DFmode). In that case, moves are the only
2689 thing valid, so we can't do a convert from there. This
2690 occurs when the calling sequence allows such misaligned
2691 usages.
2693 In addition, the conversion may involve a call, which could
2694 clobber parameters which haven't been copied to pseudo
2695 registers yet. Therefore, we must first copy the parm to
2696 a pseudo reg here, and save the conversion until after all
2697 parameters have been moved. */
2699 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2701 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2703 push_to_sequence (all->conversion_insns);
2704 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2706 if (GET_CODE (tempreg) == SUBREG
2707 && GET_MODE (tempreg) == data->nominal_mode
2708 && REG_P (SUBREG_REG (tempreg))
2709 && data->nominal_mode == data->passed_mode
2710 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2711 && GET_MODE_SIZE (GET_MODE (tempreg))
2712 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2714 /* The argument is already sign/zero extended, so note it
2715 into the subreg. */
2716 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2717 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2720 /* TREE_USED gets set erroneously during expand_assignment. */
2721 save_tree_used = TREE_USED (parm);
2722 expand_assignment (parm, make_tree (data->nominal_type, tempreg), 0);
2723 TREE_USED (parm) = save_tree_used;
2724 all->conversion_insns = get_insns ();
2725 end_sequence ();
2727 did_conversion = true;
2729 else
2730 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2732 /* If we were passed a pointer but the actual value can safely live
2733 in a register, put it in one. */
2734 if (data->passed_pointer
2735 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2736 /* If by-reference argument was promoted, demote it. */
2737 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2738 || use_register_for_decl (parm)))
2740 /* We can't use nominal_mode, because it will have been set to
2741 Pmode above. We must use the actual mode of the parm. */
2742 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2743 mark_user_reg (parmreg);
2745 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2747 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2748 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2750 push_to_sequence (all->conversion_insns);
2751 emit_move_insn (tempreg, DECL_RTL (parm));
2752 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2753 emit_move_insn (parmreg, tempreg);
2754 all->conversion_insns = get_insns ();
2755 end_sequence ();
2757 did_conversion = true;
2759 else
2760 emit_move_insn (parmreg, DECL_RTL (parm));
2762 SET_DECL_RTL (parm, parmreg);
2764 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2765 now the parm. */
2766 data->stack_parm = NULL;
2769 /* If we are passed an arg by reference and it is our responsibility
2770 to make a copy, do it now.
2771 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
2772 original argument, so we must recreate them in the call to
2773 reference_callee_copied. */
2774 /* ??? Later add code to handle the case where the argument isn't
2775 modified, so the copy can be skipped. */
2777 else if (data->passed_pointer)
2779 tree type = TREE_TYPE (data->passed_type);
2781 if (reference_callee_copied (&all->args_so_far, TYPE_MODE (type),
2782 type, data->named_arg))
2784 rtx copy;
2786 /* This sequence may involve a library call perhaps clobbering
2787 registers that haven't been copied to pseudos yet. */
2789 push_to_sequence (all->conversion_insns);
2791 if (!COMPLETE_TYPE_P (type)
2792 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2794 /* This is a variable sized object. */
2795 copy = allocate_dynamic_stack_space (expr_size (parm), NULL_RTX,
2796 TYPE_ALIGN (type));
2797 copy = gen_rtx_MEM (BLKmode, copy);
2799 else
2800 copy = assign_stack_temp (TYPE_MODE (type),
2801 int_size_in_bytes (type), 1);
2802 set_mem_attributes (copy, parm, 1);
2804 store_expr (parm, copy, 0);
2805 emit_move_insn (parmreg, XEXP (copy, 0));
2806 all->conversion_insns = get_insns ();
2807 end_sequence ();
2809 did_conversion = true;
2813 /* Mark the register as eliminable if we did no conversion and it was
2814 copied from memory at a fixed offset, and the arg pointer was not
2815 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2816 offset formed an invalid address, such memory-equivalences as we
2817 make here would screw up life analysis for it. */
2818 if (data->nominal_mode == data->passed_mode
2819 && !did_conversion
2820 && data->stack_parm != 0
2821 && MEM_P (data->stack_parm)
2822 && data->locate.offset.var == 0
2823 && reg_mentioned_p (virtual_incoming_args_rtx,
2824 XEXP (data->stack_parm, 0)))
2826 rtx linsn = get_last_insn ();
2827 rtx sinsn, set;
2829 /* Mark complex types separately. */
2830 if (GET_CODE (parmreg) == CONCAT)
2832 enum machine_mode submode
2833 = GET_MODE_INNER (GET_MODE (parmreg));
2834 int regnor = REGNO (gen_realpart (submode, parmreg));
2835 int regnoi = REGNO (gen_imagpart (submode, parmreg));
2836 rtx stackr = gen_realpart (submode, data->stack_parm);
2837 rtx stacki = gen_imagpart (submode, data->stack_parm);
2839 /* Scan backwards for the set of the real and
2840 imaginary parts. */
2841 for (sinsn = linsn; sinsn != 0;
2842 sinsn = prev_nonnote_insn (sinsn))
2844 set = single_set (sinsn);
2845 if (set == 0)
2846 continue;
2848 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2849 REG_NOTES (sinsn)
2850 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2851 REG_NOTES (sinsn));
2852 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2853 REG_NOTES (sinsn)
2854 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2855 REG_NOTES (sinsn));
2858 else if ((set = single_set (linsn)) != 0
2859 && SET_DEST (set) == parmreg)
2860 REG_NOTES (linsn)
2861 = gen_rtx_EXPR_LIST (REG_EQUIV,
2862 data->stack_parm, REG_NOTES (linsn));
2865 /* For pointer data type, suggest pointer register. */
2866 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2867 mark_reg_pointer (parmreg,
2868 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2871 /* A subroutine of assign_parms. Allocate stack space to hold the current
2872 parameter. Get it there. Perform all ABI specified conversions. */
2874 static void
2875 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2876 struct assign_parm_data_one *data)
2878 /* Value must be stored in the stack slot STACK_PARM during function
2879 execution. */
2881 if (data->promoted_mode != data->nominal_mode)
2883 /* Conversion is required. */
2884 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2886 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2888 push_to_sequence (all->conversion_insns);
2889 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2890 TYPE_UNSIGNED (TREE_TYPE (parm)));
2892 if (data->stack_parm)
2893 /* ??? This may need a big-endian conversion on sparc64. */
2894 data->stack_parm
2895 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2897 all->conversion_insns = get_insns ();
2898 end_sequence ();
2901 if (data->entry_parm != data->stack_parm)
2903 if (data->stack_parm == 0)
2905 data->stack_parm
2906 = assign_stack_local (GET_MODE (data->entry_parm),
2907 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2908 0);
2909 set_mem_attributes (data->stack_parm, parm, 1);
2912 if (data->promoted_mode != data->nominal_mode)
2914 push_to_sequence (all->conversion_insns);
2915 emit_move_insn (validize_mem (data->stack_parm),
2916 validize_mem (data->entry_parm));
2917 all->conversion_insns = get_insns ();
2918 end_sequence ();
2920 else
2921 emit_move_insn (validize_mem (data->stack_parm),
2922 validize_mem (data->entry_parm));
2925 SET_DECL_RTL (parm, data->stack_parm);
2926 }
2928 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2929 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2931 static void
2932 assign_parms_unsplit_complex (tree orig_fnargs, tree fnargs)
2934 tree parm;
2936 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2938 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2939 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2941 rtx tmp, real, imag;
2942 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2944 real = DECL_RTL (fnargs);
2945 imag = DECL_RTL (TREE_CHAIN (fnargs));
2946 if (inner != GET_MODE (real))
2948 real = gen_lowpart_SUBREG (inner, real);
2949 imag = gen_lowpart_SUBREG (inner, imag);
2951 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2952 SET_DECL_RTL (parm, tmp);
2954 real = DECL_INCOMING_RTL (fnargs);
2955 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2956 if (inner != GET_MODE (real))
2958 real = gen_lowpart_SUBREG (inner, real);
2959 imag = gen_lowpart_SUBREG (inner, imag);
2961 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2962 set_decl_incoming_rtl (parm, tmp);
2963 fnargs = TREE_CHAIN (fnargs);
2965 else
2967 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2968 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2970 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2971 instead of the copy of decl, i.e. FNARGS. */
2972 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2973 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2976 fnargs = TREE_CHAIN (fnargs);
2980 /* Assign RTL expressions to the function's parameters. This may involve
2981 copying them into registers and using those registers as the DECL_RTL. */
2983 void
2984 assign_parms (tree fndecl)
2986 struct assign_parm_data_all all;
2987 tree fnargs, parm;
2988 rtx internal_arg_pointer;
2989 int varargs_setup = 0;
2991 /* If the reg that the virtual arg pointer will be translated into is
2992 not a fixed reg or is the stack pointer, make a copy of the virtual
2993 arg pointer, and address parms via the copy. The frame pointer is
2994 considered fixed even though it is not marked as such.
2996 The second time through, simply use ap to avoid generating rtx. */
2998 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2999 || ! (fixed_regs[ARG_POINTER_REGNUM]
3000 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3001 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3002 else
3003 internal_arg_pointer = virtual_incoming_args_rtx;
3004 current_function_internal_arg_pointer = internal_arg_pointer;
3006 assign_parms_initialize_all (&all);
3007 fnargs = assign_parms_augmented_arg_list (&all);
3009 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3011 struct assign_parm_data_one data;
3013 /* Extract the type of PARM; adjust it according to ABI. */
3014 assign_parm_find_data_types (&all, parm, &data);
3016 /* Early out for errors and void parameters. */
3017 if (data.passed_mode == VOIDmode)
3019 SET_DECL_RTL (parm, const0_rtx);
3020 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3021 continue;
3024 /* Handle stdargs. LAST_NAMED is a slight misnomer; it's also true
3025 for the unnamed dummy argument following the last named argument.
3026 See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
3027 we only want to do this when we get to the actual last named
3028 argument, which will be the first time LAST_NAMED gets set. */
3029 if (data.last_named && !varargs_setup)
3031 varargs_setup = true;
3032 assign_parms_setup_varargs (&all, &data, false);
3035 /* Find out where the parameter arrives in this function. */
3036 assign_parm_find_entry_rtl (&all, &data);
3038 /* Find out where stack space for this parameter might be. */
3039 if (assign_parm_is_stack_parm (&all, &data))
3041 assign_parm_find_stack_rtl (parm, &data);
3042 assign_parm_adjust_entry_rtl (&data);
3045 /* Record permanently how this parm was passed. */
3046 set_decl_incoming_rtl (parm, data.entry_parm);
3048 /* Update info on where next arg arrives in registers. */
3049 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3050 data.passed_type, data.named_arg);
3052 assign_parm_adjust_stack_rtl (&data);
3054 if (assign_parm_setup_block_p (&data))
3055 assign_parm_setup_block (parm, &data);
3056 else if (data.passed_pointer || use_register_for_decl (parm))
3057 assign_parm_setup_reg (&all, parm, &data);
3058 else
3059 assign_parm_setup_stack (&all, parm, &data);
3062 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3063 assign_parms_unsplit_complex (all.orig_fnargs, fnargs);
3065 /* Output all parameter conversion instructions (possibly including calls)
3066 now that all parameters have been copied out of hard registers. */
3067 emit_insn (all.conversion_insns);
3069 /* If we are receiving a struct value address as the first argument, set up
3070 the RTL for the function result. As this might require code to convert
3071 the transmitted address to Pmode, we do this here to ensure that possible
3072 preliminary conversions of the address have been emitted already. */
3073 if (all.function_result_decl)
3075 tree result = DECL_RESULT (current_function_decl);
3076 rtx addr = DECL_RTL (all.function_result_decl);
3077 rtx x;
3079 if (DECL_BY_REFERENCE (result))
3080 x = addr;
3081 else
3083 addr = convert_memory_address (Pmode, addr);
3084 x = gen_rtx_MEM (DECL_MODE (result), addr);
3085 set_mem_attributes (x, result, 1);
3087 SET_DECL_RTL (result, x);
3090 /* We have aligned all the args, so add space for the pretend args. */
3091 current_function_pretend_args_size = all.pretend_args_size;
3092 all.stack_args_size.constant += all.extra_pretend_bytes;
3093 current_function_args_size = all.stack_args_size.constant;
3095 /* Adjust function incoming argument size for alignment and
3096 minimum length. */
3098 #ifdef REG_PARM_STACK_SPACE
3099 current_function_args_size = MAX (current_function_args_size,
3100 REG_PARM_STACK_SPACE (fndecl));
3101 #endif
3103 current_function_args_size
3104 = ((current_function_args_size + STACK_BYTES - 1)
3105 / STACK_BYTES) * STACK_BYTES;
3107 #ifdef ARGS_GROW_DOWNWARD
3108 current_function_arg_offset_rtx
3109 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3110 : expand_expr (size_diffop (all.stack_args_size.var,
3111 size_int (-all.stack_args_size.constant)),
3112 NULL_RTX, VOIDmode, 0));
3113 #else
3114 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3115 #endif
3117 /* See how many bytes, if any, of its args a function should try to pop
3118 on return. */
3120 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3121 current_function_args_size);
3123 /* For a stdarg.h function, save info about
3124 regs and stack space used by the named args. */
3126 current_function_args_info = all.args_so_far;
3128 /* Set the rtx used for the function return value. Put this in its
3129 own variable so any optimizers that need this information don't have
3130 to include tree.h. Do this here so it gets done when an inlined
3131 function gets output. */
3133 current_function_return_rtx
3134 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3135 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3137 /* If scalar return value was computed in a pseudo-reg, or was a named
3138 return value that got dumped to the stack, copy that to the hard
3139 return register. */
3140 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3142 tree decl_result = DECL_RESULT (fndecl);
3143 rtx decl_rtl = DECL_RTL (decl_result);
3145 if (REG_P (decl_rtl)
3146 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3147 : DECL_REGISTER (decl_result))
3149 rtx real_decl_rtl;
3151 #ifdef FUNCTION_OUTGOING_VALUE
3152 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3153 fndecl);
3154 #else
3155 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3156 fndecl);
3157 #endif
3158 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3159 /* The delay slot scheduler assumes that current_function_return_rtx
3160 holds the hard register containing the return value, not a
3161 temporary pseudo. */
3162 current_function_return_rtx = real_decl_rtl;
3167 /* Indicate whether REGNO is an incoming argument to the current function
3168 that was promoted to a wider mode. If so, return the RTX for the
3169 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3170 that REGNO is promoted from and whether the promotion was signed or
3171 unsigned. */
3173 rtx
3174 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3176 tree arg;
3178 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3179 arg = TREE_CHAIN (arg))
3180 if (REG_P (DECL_INCOMING_RTL (arg))
3181 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3182 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3184 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3185 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3187 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3188 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3189 && mode != DECL_MODE (arg))
3191 *pmode = DECL_MODE (arg);
3192 *punsignedp = unsignedp;
3193 return DECL_INCOMING_RTL (arg);
3197 return 0;
3201 /* Compute the size and offset from the start of the stacked arguments for a
3202 parm passed in mode PASSED_MODE and with type TYPE.
3204 INITIAL_OFFSET_PTR points to the current offset into the stacked
3205 arguments.
3207 The starting offset and size for this parm are returned in
3208 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3209 nonzero, the offset is that of stack slot, which is returned in
3210 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3211 padding required from the initial offset ptr to the stack slot.
3213 IN_REGS is nonzero if the argument will be passed in registers. It will
3214 never be set if REG_PARM_STACK_SPACE is not defined.
3216 FNDECL is the function in which the argument was defined.
3218 There are two types of rounding that are done. The first, controlled by
3219 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3220 list to be aligned to the specific boundary (in bits). This rounding
3221 affects the initial and starting offsets, but not the argument size.
3223 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3224 optionally rounds the size of the parm to PARM_BOUNDARY. The
3225 initial offset is not affected by this rounding, while the size always
3226 is and the starting offset may be. */
3228 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3229 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3230 callers pass in the total size of args so far as
3231 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3233 void
3234 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3235 int partial, tree fndecl ATTRIBUTE_UNUSED,
3236 struct args_size *initial_offset_ptr,
3237 struct locate_and_pad_arg_data *locate)
3239 tree sizetree;
3240 enum direction where_pad;
3241 int boundary;
3242 int reg_parm_stack_space = 0;
3243 int part_size_in_regs;
3245 #ifdef REG_PARM_STACK_SPACE
3246 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3248 /* If we have found a stack parm before we reach the end of the
3249 area reserved for registers, skip that area. */
3250 if (! in_regs)
3252 if (reg_parm_stack_space > 0)
3254 if (initial_offset_ptr->var)
3256 initial_offset_ptr->var
3257 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3258 ssize_int (reg_parm_stack_space));
3259 initial_offset_ptr->constant = 0;
3261 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3262 initial_offset_ptr->constant = reg_parm_stack_space;
3265 #endif /* REG_PARM_STACK_SPACE */
3267 part_size_in_regs = 0;
3268 if (reg_parm_stack_space == 0)
3269 part_size_in_regs = ((partial * UNITS_PER_WORD)
3270 / (PARM_BOUNDARY / BITS_PER_UNIT)
3271 * (PARM_BOUNDARY / BITS_PER_UNIT));
3273 sizetree
3274 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3275 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3276 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3277 locate->where_pad = where_pad;
3279 #ifdef ARGS_GROW_DOWNWARD
3280 locate->slot_offset.constant = -initial_offset_ptr->constant;
3281 if (initial_offset_ptr->var)
3282 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3283 initial_offset_ptr->var);
3285 {
3286 tree s2 = sizetree;
3287 if (where_pad != none
3288 && (!host_integerp (sizetree, 1)
3289 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3290 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3291 SUB_PARM_SIZE (locate->slot_offset, s2);
3292 }
3294 locate->slot_offset.constant += part_size_in_regs;
3296 if (!in_regs
3297 #ifdef REG_PARM_STACK_SPACE
3298 || REG_PARM_STACK_SPACE (fndecl) > 0
3299 #endif
3300 )
3301 pad_to_arg_alignment (&locate->slot_offset, boundary,
3302 &locate->alignment_pad);
3304 locate->size.constant = (-initial_offset_ptr->constant
3305 - locate->slot_offset.constant);
3306 if (initial_offset_ptr->var)
3307 locate->size.var = size_binop (MINUS_EXPR,
3308 size_binop (MINUS_EXPR,
3309 ssize_int (0),
3310 initial_offset_ptr->var),
3311 locate->slot_offset.var);
3313 /* Pad_below needs the pre-rounded size to know how much to pad
3314 below. */
3315 locate->offset = locate->slot_offset;
3316 if (where_pad == downward)
3317 pad_below (&locate->offset, passed_mode, sizetree);
3319 #else /* !ARGS_GROW_DOWNWARD */
3320 if (!in_regs
3321 #ifdef REG_PARM_STACK_SPACE
3322 || REG_PARM_STACK_SPACE (fndecl) > 0
3323 #endif
3324 )
3325 pad_to_arg_alignment (initial_offset_ptr, boundary,
3326 &locate->alignment_pad);
3327 locate->slot_offset = *initial_offset_ptr;
3329 #ifdef PUSH_ROUNDING
3330 if (passed_mode != BLKmode)
3331 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3332 #endif
3334 /* Pad_below needs the pre-rounded size to know how much to pad below
3335 so this must be done before rounding up. */
3336 locate->offset = locate->slot_offset;
3337 if (where_pad == downward)
3338 pad_below (&locate->offset, passed_mode, sizetree);
3340 if (where_pad != none
3341 && (!host_integerp (sizetree, 1)
3342 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3343 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3345 ADD_PARM_SIZE (locate->size, sizetree);
3347 locate->size.constant -= part_size_in_regs;
3348 #endif /* ARGS_GROW_DOWNWARD */
3349 }
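/* A worked example for the !ARGS_GROW_DOWNWARD branch, assuming
   PARM_BOUNDARY == 32, a FUNCTION_ARG_BOUNDARY of 64 bits, an incoming
   *initial_offset_ptr of 4 bytes, a 6-byte BLKmode argument, and
   where_pad != none:

     pad_to_arg_alignment:   4 -> 8       (alignment_pad = 4)
     locate->slot_offset   = 8
     sizetree              : 6 -> round_up (6, 4) = 8
     locate->size          = 8

   The slot starts at byte 8, and the caller advances its running args
   size by the padded 8 bytes, not the raw 6. */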
3351 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3352 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3354 static void
3355 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3356 struct args_size *alignment_pad)
3358 tree save_var = NULL_TREE;
3359 HOST_WIDE_INT save_constant = 0;
3360 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3361 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3363 #ifdef SPARC_STACK_BOUNDARY_HACK
3364 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3365 higher than the real alignment of %sp. However, when it does this,
3366 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3367 This is a temporary hack while the sparc port is fixed. */
3368 if (SPARC_STACK_BOUNDARY_HACK)
3369 sp_offset = 0;
3370 #endif
3372 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3373 {
3374 save_var = offset_ptr->var;
3375 save_constant = offset_ptr->constant;
3376 }
3378 alignment_pad->var = NULL_TREE;
3379 alignment_pad->constant = 0;
3381 if (boundary > BITS_PER_UNIT)
3383 if (offset_ptr->var)
3385 tree sp_offset_tree = ssize_int (sp_offset);
3386 tree offset = size_binop (PLUS_EXPR,
3387 ARGS_SIZE_TREE (*offset_ptr),
3388 sp_offset_tree);
3389 #ifdef ARGS_GROW_DOWNWARD
3390 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3391 #else
3392 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3393 #endif
3395 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3396 /* ARGS_SIZE_TREE includes constant term. */
3397 offset_ptr->constant = 0;
3398 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3399 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3400 save_var);
3402 else
3404 offset_ptr->constant = -sp_offset +
3405 #ifdef ARGS_GROW_DOWNWARD
3406 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3407 #else
3408 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3409 #endif
3410 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3411 alignment_pad->constant = offset_ptr->constant - save_constant;
3412 }
3413 }
3414 }
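/* A worked example of the constant branch above, assuming
   boundary == 64 bits (boundary_in_bytes == 8), STACK_POINTER_OFFSET
   == 4, and an incoming constant offset of 9:

     offset_ptr->constant = -4 + CEIL_ROUND (9 + 4, 8)
                          = -4 + 16
                          = 12

   so that sp + STACK_POINTER_OFFSET + offset (= 16) is what lands on
   the 8-byte boundary, not the raw offset itself. */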
3416 static void
3417 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3418 {
3419 if (passed_mode != BLKmode)
3420 {
3421 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3422 offset_ptr->constant
3423 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3424 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3425 - GET_MODE_SIZE (passed_mode));
3426 }
3427 else
3428 {
3429 if (TREE_CODE (sizetree) != INTEGER_CST
3430 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3431 {
3432 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3433 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3434 /* Add it in. */
3435 ADD_PARM_SIZE (*offset_ptr, s2);
3436 SUB_PARM_SIZE (*offset_ptr, sizetree);
3437 }
3438 }
3439 }
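/* A worked example: for passed_mode == HImode (16 bits) and
   PARM_BOUNDARY == 32, the first branch adds

     (16 + 31) / 32 * 32 / BITS_PER_UNIT - GET_MODE_SIZE (HImode)
       = 4 - 2 = 2

   bytes of below-padding, so the 2-byte value sits at the top of its
   4-byte slot; the else branch gets the same effect for BLKmode via
   the round_up/subtract pair. */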
3441 /* Walk the tree of blocks describing the binding levels within a function
3442 and warn about variables that might be killed by setjmp or vfork.
3443 This is done after calling flow_analysis and before global_alloc
3444 clobbers the pseudo-regs to hard regs. */
3446 void
3447 setjmp_vars_warning (tree block)
3449 tree decl, sub;
3451 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3453 if (TREE_CODE (decl) == VAR_DECL
3454 && DECL_RTL_SET_P (decl)
3455 && REG_P (DECL_RTL (decl))
3456 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3457 warning ("%Jvariable %qD might be clobbered by %<longjmp%>"
3458 " or %<vfork%>",
3459 decl, decl);
3462 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3463 setjmp_vars_warning (sub);
3466 /* Do the appropriate part of setjmp_vars_warning
3467 but for arguments instead of local variables. */
3469 void
3470 setjmp_args_warning (void)
3472 tree decl;
3473 for (decl = DECL_ARGUMENTS (current_function_decl);
3474 decl; decl = TREE_CHAIN (decl))
3475 if (DECL_RTL (decl) != 0
3476 && REG_P (DECL_RTL (decl))
3477 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3478 warning ("%Jargument %qD might be clobbered by %<longjmp%> or %<vfork%>",
3479 decl, decl);
3483 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3484 and create duplicate blocks. */
3485 /* ??? Need an option to either create block fragments or to create
3486 abstract origin duplicates of a source block. It really depends
3487 on what optimization has been performed. */
3489 void
3490 reorder_blocks (void)
3492 tree block = DECL_INITIAL (current_function_decl);
3493 varray_type block_stack;
3495 if (block == NULL_TREE)
3496 return;
3498 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
3500 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3501 clear_block_marks (block);
3503 /* Prune the old trees away, so that they don't get in the way. */
3504 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3505 BLOCK_CHAIN (block) = NULL_TREE;
3507 /* Recreate the block tree from the note nesting. */
3508 reorder_blocks_1 (get_insns (), block, &block_stack);
3509 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3511 /* Remove deleted blocks from the block fragment chains. */
3512 reorder_fix_fragments (block);
3515 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3517 void
3518 clear_block_marks (tree block)
3520 while (block)
3522 TREE_ASM_WRITTEN (block) = 0;
3523 clear_block_marks (BLOCK_SUBBLOCKS (block));
3524 block = BLOCK_CHAIN (block);
3528 static void
3529 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
3531 rtx insn;
3533 for (insn = insns; insn; insn = NEXT_INSN (insn))
3535 if (NOTE_P (insn))
3537 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3539 tree block = NOTE_BLOCK (insn);
3541 /* If we have seen this block before, that means it now
3542 spans multiple address regions. Create a new fragment. */
3543 if (TREE_ASM_WRITTEN (block))
3545 tree new_block = copy_node (block);
3546 tree origin;
3548 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3549 ? BLOCK_FRAGMENT_ORIGIN (block)
3550 : block);
3551 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3552 BLOCK_FRAGMENT_CHAIN (new_block)
3553 = BLOCK_FRAGMENT_CHAIN (origin);
3554 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3556 NOTE_BLOCK (insn) = new_block;
3557 block = new_block;
3560 BLOCK_SUBBLOCKS (block) = 0;
3561 TREE_ASM_WRITTEN (block) = 1;
3562 /* When there's only one block for the entire function,
3563 current_block == block and we mustn't do this, it
3564 will cause infinite recursion. */
3565 if (block != current_block)
3567 BLOCK_SUPERCONTEXT (block) = current_block;
3568 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3569 BLOCK_SUBBLOCKS (current_block) = block;
3570 current_block = block;
3572 VARRAY_PUSH_TREE (*p_block_stack, block);
3574 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3576 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
3577 VARRAY_POP (*p_block_stack);
3578 BLOCK_SUBBLOCKS (current_block)
3579 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3580 current_block = BLOCK_SUPERCONTEXT (current_block);
3586 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3587 appears in the block tree, select one of the fragments to become
3588 the new origin block. */
3590 static void
3591 reorder_fix_fragments (tree block)
3593 while (block)
3595 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3596 tree new_origin = NULL_TREE;
3598 if (dup_origin)
3600 if (! TREE_ASM_WRITTEN (dup_origin))
3602 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3604 /* Find the first of the remaining fragments. There must
3605 be at least one -- the current block. */
3606 while (! TREE_ASM_WRITTEN (new_origin))
3607 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3608 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3611 else if (! dup_origin)
3612 new_origin = block;
3614 /* Re-root the rest of the fragments to the new origin. If
3615 DUP_ORIGIN was null, BLOCK was itself the origin of a chain of
3616 fragments, and we want to remove those fragments that didn't
3617 make it to the output. */
3618 if (new_origin)
3620 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3621 tree chain = *pp;
3623 while (chain)
3625 if (TREE_ASM_WRITTEN (chain))
3627 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3628 *pp = chain;
3629 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3631 chain = BLOCK_FRAGMENT_CHAIN (chain);
3633 *pp = NULL_TREE;
3636 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3637 block = BLOCK_CHAIN (block);
3641 /* Reverse the order of elements in the chain T of blocks,
3642 and return the new head of the chain (old last element). */
3644 tree
3645 blocks_nreverse (tree t)
3647 tree prev = 0, decl, next;
3648 for (decl = t; decl; decl = next)
3650 next = BLOCK_CHAIN (decl);
3651 BLOCK_CHAIN (decl) = prev;
3652 prev = decl;
3654 return prev;
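/* Usage sketch (illustrative): the reversal is destructive, so callers
   must capture the returned head, as reorder_blocks does above:

     BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
*/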
3657 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3658 non-NULL, list them all into VECTOR, in a depth-first preorder
3659 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3660 blocks. */
3662 static int
3663 all_blocks (tree block, tree *vector)
3665 int n_blocks = 0;
3667 while (block)
3669 TREE_ASM_WRITTEN (block) = 0;
3671 /* Record this block. */
3672 if (vector)
3673 vector[n_blocks] = block;
3675 ++n_blocks;
3677 /* Record the subblocks, and their subblocks... */
3678 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3679 vector ? vector + n_blocks : 0);
3680 block = BLOCK_CHAIN (block);
3683 return n_blocks;
3686 /* Return a vector containing all the blocks rooted at BLOCK. The
3687 number of elements in the vector is stored in N_BLOCKS_P. The
3688 vector is dynamically allocated; it is the caller's responsibility
3689 to call `free' on the pointer returned. */
3691 static tree *
3692 get_block_vector (tree block, int *n_blocks_p)
3694 tree *block_vector;
3696 *n_blocks_p = all_blocks (block, NULL);
3697 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
3698 all_blocks (block, block_vector);
3700 return block_vector;
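/* Illustrative caller (a sketch; `visit' is a hypothetical function):
   all_blocks runs twice, once to count and once to fill, and the
   caller owns the returned memory:

     int n_blocks, i;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);

     for (i = 0; i < n_blocks; ++i)
       visit (vec[i]);
     free (vec);
*/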
3703 static GTY(()) int next_block_index = 2;
3705 /* Set BLOCK_NUMBER for all the blocks in FN. */
3707 void
3708 number_blocks (tree fn)
3710 int i;
3711 int n_blocks;
3712 tree *block_vector;
3714 /* For SDB and XCOFF debugging output, we start numbering the blocks
3715 from 1 within each function, rather than keeping a running
3716 count. */
3717 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3718 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3719 next_block_index = 1;
3720 #endif
3722 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3724 /* The top-level BLOCK isn't numbered at all. */
3725 for (i = 1; i < n_blocks; ++i)
3726 /* We number the blocks from two. */
3727 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3729 free (block_vector);
3731 return;
3734 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3736 tree
3737 debug_find_var_in_block_tree (tree var, tree block)
3739 tree t;
3741 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3742 if (t == var)
3743 return block;
3745 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3747 tree ret = debug_find_var_in_block_tree (var, t);
3748 if (ret)
3749 return ret;
3752 return NULL_TREE;
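/* This is meant as a debugging aid; e.g. from gdb one might try
   (assuming VAR is a VAR_DECL or PARM_DECL currently in scope):

     (gdb) call debug_find_var_in_block_tree (var, DECL_INITIAL (cfun->decl))
*/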
3755 /* Allocate a function structure for FNDECL and set its contents
3756 to the defaults. */
3758 void
3759 allocate_struct_function (tree fndecl)
3761 tree result;
3762 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3764 cfun = ggc_alloc_cleared (sizeof (struct function));
3766 cfun->stack_alignment_needed = STACK_BOUNDARY;
3767 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3769 current_function_funcdef_no = funcdef_no++;
3771 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3773 init_eh_for_function ();
3775 lang_hooks.function.init (cfun);
3776 if (init_machine_status)
3777 cfun->machine = (*init_machine_status) ();
3779 if (fndecl == NULL)
3780 return;
3782 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3783 cfun->decl = fndecl;
3785 result = DECL_RESULT (fndecl);
3786 if (aggregate_value_p (result, fndecl))
3788 #ifdef PCC_STATIC_STRUCT_RETURN
3789 current_function_returns_pcc_struct = 1;
3790 #endif
3791 current_function_returns_struct = 1;
3794 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3796 current_function_stdarg
3797 = (fntype
3798 && TYPE_ARG_TYPES (fntype) != 0
3799 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3800 != void_type_node));
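/* Example (illustrative): for `int f (int, ...)' the TYPE_ARG_TYPES
   list ends with the `int' node rather than void_type_node, so
   current_function_stdarg is set. For `int f (int)' the list is
   terminated by void_type_node, and an unprototyped `int f ()' has no
   TYPE_ARG_TYPES at all; neither is treated as stdarg. */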
3803 /* Reset cfun, and other non-struct-function variables to defaults as
3804 appropriate for emitting rtl at the start of a function. */
3806 static void
3807 prepare_function_start (tree fndecl)
3809 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3810 cfun = DECL_STRUCT_FUNCTION (fndecl);
3811 else
3812 allocate_struct_function (fndecl);
3813 init_emit ();
3814 init_varasm_status (cfun);
3815 init_expr ();
3817 cse_not_expected = ! optimize;
3819 /* Caller save not needed yet. */
3820 caller_save_needed = 0;
3822 /* We haven't done register allocation yet. */
3823 reg_renumber = 0;
3825 /* Indicate that we have not instantiated virtual registers yet. */
3826 virtuals_instantiated = 0;
3828 /* Indicate that we want CONCATs now. */
3829 generating_concat_p = 1;
3831 /* Indicate we have no need of a frame pointer yet. */
3832 frame_pointer_needed = 0;
3835 /* Initialize the rtl expansion mechanism so that we can do simple things
3836 like generate sequences. This is used to provide a context during global
3837 initialization of some passes. */
3838 void
3839 init_dummy_function_start (void)
3841 prepare_function_start (NULL);
3844 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3845 and initialize static variables for generating RTL for the statements
3846 of the function. */
3848 void
3849 init_function_start (tree subr)
3851 prepare_function_start (subr);
3853 /* Prevent ever trying to delete the first instruction of a
3854 function. Also tell final how to output a linenum before the
3855 function prologue. Note linenums could be missing, e.g. when
3856 compiling a Java .class file. */
3857 if (! DECL_IS_BUILTIN (subr))
3858 emit_line_note (DECL_SOURCE_LOCATION (subr));
3860 /* Make sure first insn is a note even if we don't want linenums.
3861 This makes sure the first insn will never be deleted.
3862 Also, final expects a note to appear there. */
3863 emit_note (NOTE_INSN_DELETED);
3865 /* Warn if the function's return value is an aggregate type,
3866 regardless of which calling convention we are using for it. */
3867 if (warn_aggregate_return
3868 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3869 warning ("function returns an aggregate");
3872 /* Make sure all values used by the optimization passes have sane
3873 defaults. */
3874 void
3875 init_function_for_compilation (void)
3877 reg_renumber = 0;
3879 /* No prologue/epilogue insns yet. */
3880 VARRAY_GROW (prologue, 0);
3881 VARRAY_GROW (epilogue, 0);
3882 VARRAY_GROW (sibcall_epilogue, 0);
3885 /* Expand a call to __main at the beginning of a possible main function. */
3887 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
3888 #undef HAS_INIT_SECTION
3889 #define HAS_INIT_SECTION
3890 #endif
3892 void
3893 expand_main_function (void)
3895 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
3896 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
3898 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
3899 rtx tmp, seq;
3901 start_sequence ();
3902 /* Forcibly align the stack. */
3903 #ifdef STACK_GROWS_DOWNWARD
3904 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
3905 stack_pointer_rtx, 1, OPTAB_WIDEN);
3906 #else
3907 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3908 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
3909 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
3910 stack_pointer_rtx, 1, OPTAB_WIDEN);
3911 #endif
3912 if (tmp != stack_pointer_rtx)
3913 emit_move_insn (stack_pointer_rtx, tmp);
3915 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
3916 tmp = force_reg (Pmode, const0_rtx);
3917 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
3918 seq = get_insns ();
3919 end_sequence ();
3921 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
3922 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
3923 break;
3924 if (tmp)
3925 emit_insn_before (seq, tmp);
3926 else
3927 emit_insn (seq);
3929 #endif
3931 #ifndef HAS_INIT_SECTION
3932 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3933 #endif
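/* Worked example (illustrative) for the alignment code above: with
   PREFERRED_STACK_BOUNDARY == 128, ALIGN is 16 and the AND with -16
   clears the low four bits of the stack pointer, rounding it down to a
   16-byte boundary when the stack grows downward; the PLUS/AND pair
   rounds up instead when the stack grows upward. */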
3936 /* The PENDING_SIZES represent the sizes of variable-sized types.
3937 Create RTL for the various sizes now (using temporary variables),
3938 so that we can refer to the sizes from the RTL we are generating
3939 for the current function. The PENDING_SIZES are a TREE_LIST. The
3940 TREE_VALUE of each node is a SAVE_EXPR. */
3942 void
3943 expand_pending_sizes (tree pending_sizes)
3945 tree tem;
3947 /* Evaluate now the sizes of any types declared among the arguments. */
3948 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
3949 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
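/* Example (illustrative): given

     void f (int n, int a[n][n]);

   the C front end wraps the size computation for A's variably sized
   type in a SAVE_EXPR on the pending-sizes list; expanding it here
   lets later RTL for the function body refer to the already-computed
   size. */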
3952 /* Start the RTL for a new function, and set variables used for
3953 emitting RTL.
3954 SUBR is the FUNCTION_DECL node. */
3958 void
3959 expand_function_start (tree subr)
3961 /* Make sure volatile mem refs aren't considered
3962 valid operands of arithmetic insns. */
3963 init_recog_no_volatile ();
3965 current_function_profile
3966 = (profile_flag
3967 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
3969 current_function_limit_stack
3970 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
3972 /* Make the label for return statements to jump to. Do not special
3973 case machines with special return instructions -- they will be
3974 handled later during jump, ifcvt, or epilogue creation. */
3975 return_label = gen_label_rtx ();
3977 /* Initialize rtx used to return the value. */
3978 /* Do this before assign_parms so that we copy the struct value address
3979 before any library calls that assign parms might generate. */
3981 /* Decide whether to return the value in memory or in a register. */
3982 if (aggregate_value_p (DECL_RESULT (subr), subr))
3984 /* Returning something that won't go in a register. */
3985 rtx value_address = 0;
3987 #ifdef PCC_STATIC_STRUCT_RETURN
3988 if (current_function_returns_pcc_struct)
3990 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3991 value_address = assemble_static_space (size);
3993 else
3994 #endif
3996 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
3997 /* Expect to be passed the address of a place to store the value.
3998 If it is passed as an argument, assign_parms will take care of
3999 it. */
4000 if (sv)
4002 value_address = gen_reg_rtx (Pmode);
4003 emit_move_insn (value_address, sv);
4006 if (value_address)
4008 rtx x = value_address;
4009 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4011 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4012 set_mem_attributes (x, DECL_RESULT (subr), 1);
4014 SET_DECL_RTL (DECL_RESULT (subr), x);
4017 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4018 /* If return mode is void, this decl rtl should not be used. */
4019 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4020 else
4022 /* Compute the return values into a pseudo reg, which we will copy
4023 into the true return register after the cleanups are done. */
4025 /* In order to figure out what mode to use for the pseudo, we
4026 figure out what the mode of the eventual return register will
4027 actually be, and use that. */
4028 rtx hard_reg
4029 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
4030 subr, 1);
4032 /* Structures that are returned in registers are not aggregate_value_p,
4033 so we may see a PARALLEL or a REG. */
4034 if (REG_P (hard_reg))
4035 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
4036 else
4038 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4039 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4042 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4043 result to the real return register(s). */
4044 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4047 /* Initialize rtx for parameters and local variables.
4048 In some cases this requires emitting insns. */
4049 assign_parms (subr);
4051 /* If function gets a static chain arg, store it. */
4052 if (cfun->static_chain_decl)
4054 tree parm = cfun->static_chain_decl;
4055 rtx local = gen_reg_rtx (Pmode);
4057 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4058 SET_DECL_RTL (parm, local);
4059 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4061 emit_move_insn (local, static_chain_incoming_rtx);
4064 /* If the function receives a non-local goto, then store the
4065 bits we need to restore the frame pointer. */
4066 if (cfun->nonlocal_goto_save_area)
4068 tree t_save;
4069 rtx r_save;
4071 /* ??? We need to do this save early. Unfortunately, this point is
4072 before the frame variable gets declared. Help out... */
4073 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4075 t_save = build4 (ARRAY_REF, ptr_type_node,
4076 cfun->nonlocal_goto_save_area,
4077 integer_zero_node, NULL_TREE, NULL_TREE);
4078 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4079 r_save = convert_memory_address (Pmode, r_save);
4081 emit_move_insn (r_save, virtual_stack_vars_rtx);
4082 update_nonlocal_goto_save_area ();
4085 /* The following was moved from init_function_start.
4086 The move is supposed to make sdb output more accurate. */
4087 /* Indicate the beginning of the function body,
4088 as opposed to parm setup. */
4089 emit_note (NOTE_INSN_FUNCTION_BEG);
4091 if (!NOTE_P (get_last_insn ()))
4092 emit_note (NOTE_INSN_DELETED);
4093 parm_birth_insn = get_last_insn ();
4095 if (current_function_profile)
4097 #ifdef PROFILE_HOOK
4098 PROFILE_HOOK (current_function_funcdef_no);
4099 #endif
4102 /* If we end up needing a tail-recursion label, it should go right
4103 after the display initializations. Ensure we have a NOTE here,
4104 since some things (like trampolines) get placed before this. */
4105 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4107 /* Evaluate now the sizes of any types declared among the arguments. */
4108 expand_pending_sizes (nreverse (get_pending_sizes ()));
4110 /* Make sure there is a line number after the function entry setup code. */
4111 force_next_line_note ();
4114 /* Undo the effects of init_dummy_function_start. */
4115 void
4116 expand_dummy_function_end (void)
4118 /* End any sequences that failed to be closed due to syntax errors. */
4119 while (in_sequence_p ())
4120 end_sequence ();
4122 /* Outside function body, can't compute type's actual size
4123 until next function's body starts. */
4125 free_after_parsing (cfun);
4126 free_after_compilation (cfun);
4127 cfun = 0;
4130 /* Call DOIT for each hard register used as a return value from
4131 the current function. */
4133 void
4134 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4136 rtx outgoing = current_function_return_rtx;
4138 if (! outgoing)
4139 return;
4141 if (REG_P (outgoing))
4142 (*doit) (outgoing, arg);
4143 else if (GET_CODE (outgoing) == PARALLEL)
4145 int i;
4147 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4149 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4151 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4152 (*doit) (x, arg);
4157 static void
4158 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4160 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4163 void
4164 clobber_return_register (void)
4166 diddle_return_value (do_clobber_return_reg, NULL);
4168 /* In case we use a pseudo to return the value, clobber it too. */
4169 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4171 tree decl_result = DECL_RESULT (current_function_decl);
4172 rtx decl_rtl = DECL_RTL (decl_result);
4173 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4175 do_clobber_return_reg (decl_rtl, NULL);
4180 static void
4181 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4183 emit_insn (gen_rtx_USE (VOIDmode, reg));
4186 void
4187 use_return_register (void)
4189 diddle_return_value (do_use_return_reg, NULL);
4192 /* Possibly warn about unused parameters. */
4193 void
4194 do_warn_unused_parameter (tree fn)
4196 tree decl;
4198 for (decl = DECL_ARGUMENTS (fn);
4199 decl; decl = TREE_CHAIN (decl))
4200 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4201 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4202 warning ("%Junused parameter %qD", decl, decl);
4205 static GTY(()) rtx initial_trampoline;
4207 /* Generate RTL for the end of the current function. */
4209 void
4210 expand_function_end (void)
4212 rtx clobber_after;
4214 /* If arg_pointer_save_area was referenced only from a nested
4215 function, we will not have initialized it yet. Do that now. */
4216 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4217 get_arg_pointer_save_area (cfun);
4219 /* If we are doing stack checking and this function makes calls,
4220 do a stack probe at the start of the function to ensure we have enough
4221 space for another stack frame. */
4222 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4224 rtx insn, seq;
4226 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4227 if (CALL_P (insn))
4229 start_sequence ();
4230 probe_stack_range (STACK_CHECK_PROTECT,
4231 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4232 seq = get_insns ();
4233 end_sequence ();
4234 emit_insn_before (seq, tail_recursion_reentry);
4235 break;
4239 /* Possibly warn about unused parameters.
4240 When the front end does unit-at-a-time, the warning is already
4241 issued at finalization time. */
4242 if (warn_unused_parameter
4243 && !lang_hooks.callgraph.expand_function)
4244 do_warn_unused_parameter (current_function_decl);
4246 /* End any sequences that failed to be closed due to syntax errors. */
4247 while (in_sequence_p ())
4248 end_sequence ();
4250 clear_pending_stack_adjust ();
4251 do_pending_stack_adjust ();
4253 /* @@@ This is a kludge. We want to ensure that instructions that
4254 may trap are not moved into the epilogue by scheduling, because
4255 we don't always emit unwind information for the epilogue.
4256 However, not all machine descriptions define a blockage insn, so
4257 emit an ASM_INPUT to act as one. */
4258 if (flag_non_call_exceptions)
4259 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4261 /* Mark the end of the function body.
4262 If control reaches this insn, the function can drop through
4263 without returning a value. */
4264 emit_note (NOTE_INSN_FUNCTION_END);
4266 /* Must mark the last line number note in the function, so that the test
4267 coverage code can avoid counting the last line twice. This just tells
4268 the code to ignore the immediately following line note, since there
4269 already exists a copy of this note somewhere above. This line number
4270 note is still needed for debugging though, so we can't delete it. */
4271 if (flag_test_coverage)
4272 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4274 /* Output a linenumber for the end of the function.
4275 SDB depends on this. */
4276 force_next_line_note ();
4277 emit_line_note (input_location);
4279 /* Before the return label (if any), clobber the return
4280 registers so that they are not propagated live to the rest of
4281 the function. This can only happen with functions that drop
4282 through; if there had been a return statement, there would
4283 have either been a return rtx, or a jump to the return label.
4285 We delay actual code generation until after current_function_value_rtx
4286 has been computed. */
4287 clobber_after = get_last_insn ();
4289 /* Output the label for the actual return from the function,
4290 if one is expected. This happens either because a function epilogue
4291 is used instead of a return instruction, or because a return was done
4292 with a goto in order to run local cleanups, or because of pcc-style
4293 structure returning. */
4294 if (return_label)
4295 emit_label (return_label);
4297 /* Let except.c know where it should emit the call to unregister
4298 the function context for sjlj exceptions. */
4299 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4300 sjlj_emit_function_exit_after (get_last_insn ());
4302 /* If we had calls to alloca, and this machine needs
4303 an accurate stack pointer to exit the function,
4304 insert some code to save and restore the stack pointer. */
4305 if (! EXIT_IGNORE_STACK
4306 && current_function_calls_alloca)
4308 rtx tem = 0;
4310 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4311 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4314 /* If scalar return value was computed in a pseudo-reg, or was a named
4315 return value that got dumped to the stack, copy that to the hard
4316 return register. */
4317 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4319 tree decl_result = DECL_RESULT (current_function_decl);
4320 rtx decl_rtl = DECL_RTL (decl_result);
4322 if (REG_P (decl_rtl)
4323 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4324 : DECL_REGISTER (decl_result))
4326 rtx real_decl_rtl = current_function_return_rtx;
4328 /* This should be set in assign_parms. */
4329 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4331 /* If this is a BLKmode structure being returned in registers,
4332 then use the mode computed in expand_return. Note that if
4333 decl_rtl is memory, then its mode may have been changed,
4334 but that current_function_return_rtx has not. */
4335 if (GET_MODE (real_decl_rtl) == BLKmode)
4336 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4338 /* If a named return value dumped decl_result to memory, then
4339 we may need to re-do the PROMOTE_MODE signed/unsigned
4340 extension. */
4341 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4343 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4345 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4346 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4347 &unsignedp, 1);
4349 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4351 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4353 /* If expand_function_start has created a PARALLEL for decl_rtl,
4354 move the result to the real return registers. Otherwise, do
4355 a group load from decl_rtl for a named return. */
4356 if (GET_CODE (decl_rtl) == PARALLEL)
4357 emit_group_move (real_decl_rtl, decl_rtl);
4358 else
4359 emit_group_load (real_decl_rtl, decl_rtl,
4360 TREE_TYPE (decl_result),
4361 int_size_in_bytes (TREE_TYPE (decl_result)));
4363 else
4364 emit_move_insn (real_decl_rtl, decl_rtl);
4368 /* If returning a structure, arrange to return the address of the value
4369 in a place where debuggers expect to find it.
4371 If returning a structure PCC style,
4372 the caller also depends on this value.
4373 And current_function_returns_pcc_struct is not necessarily set. */
4374 if (current_function_returns_struct
4375 || current_function_returns_pcc_struct)
4377 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4378 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4379 rtx outgoing;
4381 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4382 type = TREE_TYPE (type);
4383 else
4384 value_address = XEXP (value_address, 0);
4386 #ifdef FUNCTION_OUTGOING_VALUE
4387 outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4388 current_function_decl);
4389 #else
4390 outgoing = FUNCTION_VALUE (build_pointer_type (type),
4391 current_function_decl);
4392 #endif
4394 /* Mark this as a function return value so integrate will delete the
4395 assignment and USE below when inlining this function. */
4396 REG_FUNCTION_VALUE_P (outgoing) = 1;
4398 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4399 value_address = convert_memory_address (GET_MODE (outgoing),
4400 value_address);
4402 emit_move_insn (outgoing, value_address);
4404 /* Show the return register used to hold the result (in this case the
4405 address of the result). */
4406 current_function_return_rtx = outgoing;
4409 /* If this is an implementation of throw, do what's necessary to
4410 communicate between __builtin_eh_return and the epilogue. */
4411 expand_eh_return ();
4413 /* Emit the actual code to clobber return register. */
4415 rtx seq;
4417 start_sequence ();
4418 clobber_return_register ();
4419 expand_naked_return ();
4420 seq = get_insns ();
4421 end_sequence ();
4423 emit_insn_after (seq, clobber_after);
4426 /* Output the label for the naked return from the function. */
4427 emit_label (naked_return_label);
4429 /* ??? This should no longer be necessary since the old "stupid"
4430 allocator is no longer with us, but some parts of the compiler
4431 (e.g. reload_combine, and sh mach_dep_reorg) still try to compute
4432 their own lifetime info instead of using the general framework. */
4433 use_return_register ();
4436 rtx
4437 get_arg_pointer_save_area (struct function *f)
4439 rtx ret = f->x_arg_pointer_save_area;
4441 if (! ret)
4443 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4444 f->x_arg_pointer_save_area = ret;
4447 if (f == cfun && ! f->arg_pointer_save_area_init)
4449 rtx seq;
4451 /* Save the arg pointer at the beginning of the function. The
4452 generated stack slot may not be a valid memory address, so we
4453 have to check it and fix it if necessary. */
4454 start_sequence ();
4455 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4456 seq = get_insns ();
4457 end_sequence ();
4459 push_topmost_sequence ();
4460 emit_insn_after (seq, get_insns ());
4461 pop_topmost_sequence ();
4464 return ret;
4467 /* Extend a vector that records the INSN_UIDs of INSNS
4468 (a list of one or more insns). */
4470 static void
4471 record_insns (rtx insns, varray_type *vecp)
4473 int i, len;
4474 rtx tmp;
4476 tmp = insns;
4477 len = 0;
4478 while (tmp != NULL_RTX)
4480 len++;
4481 tmp = NEXT_INSN (tmp);
4484 i = VARRAY_SIZE (*vecp);
4485 VARRAY_GROW (*vecp, i + len);
4486 tmp = insns;
4487 while (tmp != NULL_RTX)
4489 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
4490 i++;
4491 tmp = NEXT_INSN (tmp);
4495 /* Set the locator of the insn chain starting at INSN to LOC. */
4496 static void
4497 set_insn_locators (rtx insn, int loc)
4499 while (insn != NULL_RTX)
4501 if (INSN_P (insn))
4502 INSN_LOCATOR (insn) = loc;
4503 insn = NEXT_INSN (insn);
4507 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4508 be running after reorg, SEQUENCE rtl is possible. */
4510 static int
4511 contains (rtx insn, varray_type vec)
4513 int i, j;
4515 if (NONJUMP_INSN_P (insn)
4516 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4518 int count = 0;
4519 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4520 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4521 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
4522 count++;
4523 return count;
4525 else
4527 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4528 if (INSN_UID (insn) == VARRAY_INT (vec, j))
4529 return 1;
4531 return 0;
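/* Example (illustrative): after delayed-branch scheduling an insn may
   look like

     (insn (sequence [(jump_insn ...) (insn ...)]))

   so each element of the SEQUENCE must have its INSN_UID checked
   individually, as above. */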
4534 int
4535 prologue_epilogue_contains (rtx insn)
4537 if (contains (insn, prologue))
4538 return 1;
4539 if (contains (insn, epilogue))
4540 return 1;
4541 return 0;
4544 int
4545 sibcall_epilogue_contains (rtx insn)
4547 if (sibcall_epilogue)
4548 return contains (insn, sibcall_epilogue);
4549 return 0;
4552 #ifdef HAVE_return
4553 /* Insert gen_return at the end of block BB. This also means updating
4554 block_for_insn appropriately. */
4556 static void
4557 emit_return_into_block (basic_block bb, rtx line_note)
4559 emit_jump_insn_after (gen_return (), BB_END (bb));
4560 if (line_note)
4561 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4563 #endif /* HAVE_return */
4565 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4567 /* These functions convert the epilogue into a variant that does not modify the
4568 stack pointer. This is used in cases where a function returns an object
4569 whose size is not known until it is computed. The called function leaves the
4570 object on the stack, leaves the stack depressed, and returns a pointer to
4571 the object.
4573 What we need to do is track all modifications and references to the stack
4574 pointer, deleting the modifications and changing the references to point to
4575 the location the stack pointer would have pointed to had the modifications
4576 taken place.
4578 These functions need to be portable so we need to make as few assumptions
4579 about the epilogue as we can. However, the epilogue basically contains
4580 three things: instructions to reset the stack pointer, instructions to
4581 reload registers, possibly including the frame pointer, and an
4582 instruction to return to the caller.
4584 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4585 We also make no attempt to validate the insns we make since if they are
4586 invalid, we probably can't do anything valid. The intent is that these
4587 routines get "smarter" as more and more machines start to use them and
4588 they try operating on different epilogues.
4590 We use the following structure to track what the part of the epilogue that
4591 we've already processed has done. We keep two copies of the SP equivalence,
4592 one for use during the insn we are processing and one for use in the next
4593 insn. The difference is because one part of a PARALLEL may adjust SP
4594 and the other may use it. */
4596 struct epi_info
4598 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4599 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4600 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4601 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4602 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4603 should be set to once we no longer need
4604 its value. */
4605 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4606 for registers. */
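/* For example (an illustrative sketch), a single epilogue insn such as

     (parallel [(set (reg sp) (plus (reg sp) (const_int 8)))
                (set (reg r0) (mem (reg sp)))])

   adjusts SP in one SET while still using the old SP in the other,
   which is why the current and the next-insn copies of the SP
   equivalence are kept separately. */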
4609 static void handle_epilogue_set (rtx, struct epi_info *);
4610 static void update_epilogue_consts (rtx, rtx, void *);
4611 static void emit_equiv_load (struct epi_info *);
4613 /* Modify INSN, a list of one or more insns that is part of the epilogue, so
4614 that it makes no modifications to the stack pointer. Return the new list of insns. */
4616 static rtx
4617 keep_stack_depressed (rtx insns)
4619 int j;
4620 struct epi_info info;
4621 rtx insn, next;
4623 /* If the epilogue is just a single instruction, it must be OK as is. */
4624 if (NEXT_INSN (insns) == NULL_RTX)
4625 return insns;
4627 /* Otherwise, start a sequence, initialize the information we have, and
4628 process all the insns we were given. */
4629 start_sequence ();
4631 info.sp_equiv_reg = stack_pointer_rtx;
4632 info.sp_offset = 0;
4633 info.equiv_reg_src = 0;
4635 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4636 info.const_equiv[j] = 0;
4638 insn = insns;
4639 next = NULL_RTX;
4640 while (insn != NULL_RTX)
4642 next = NEXT_INSN (insn);
4644 if (!INSN_P (insn))
4646 add_insn (insn);
4647 insn = next;
4648 continue;
4651 /* If this insn references the register that SP is equivalent to and
4652 we have a pending load to that register, we must force out the load
4653 first and then indicate we no longer know what SP's equivalent is. */
4654 if (info.equiv_reg_src != 0
4655 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4657 emit_equiv_load (&info);
4658 info.sp_equiv_reg = 0;
4661 info.new_sp_equiv_reg = info.sp_equiv_reg;
4662 info.new_sp_offset = info.sp_offset;
4664 /* If this is a (RETURN) and the return address is on the stack,
4665 update the address and change to an indirect jump. */
4666 if (GET_CODE (PATTERN (insn)) == RETURN
4667 || (GET_CODE (PATTERN (insn)) == PARALLEL
4668 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4670 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4671 rtx base = 0;
4672 HOST_WIDE_INT offset = 0;
4673 rtx jump_insn, jump_set;
4675 /* If the return address is in a register, we can emit the insn
4676 unchanged. Otherwise, it must be a MEM and we see what the
4677 base register and offset are. In any case, we have to emit any
4678 pending load to the equivalent reg of SP, if any. */
4679 if (REG_P (retaddr))
4681 emit_equiv_load (&info);
4682 add_insn (insn);
4683 insn = next;
4684 continue;
4686 else
4688 rtx ret_ptr;
4689 gcc_assert (MEM_P (retaddr));
4691 ret_ptr = XEXP (retaddr, 0);
4693 if (REG_P (ret_ptr))
4695 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4696 offset = 0;
4698 else
4700 gcc_assert (GET_CODE (ret_ptr) == PLUS
4701 && REG_P (XEXP (ret_ptr, 0))
4702 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4703 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4704 offset = INTVAL (XEXP (ret_ptr, 1));
4708 /* If the base of the location containing the return pointer
4709 is SP, we must update it with the replacement address. Otherwise,
4710 just build the necessary MEM. */
4711 retaddr = plus_constant (base, offset);
4712 if (base == stack_pointer_rtx)
4713 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4714 plus_constant (info.sp_equiv_reg,
4715 info.sp_offset));
4717 retaddr = gen_rtx_MEM (Pmode, retaddr);
4719 /* If there is a pending load to the equivalent register for SP
4720 and we reference that register, we must load our address into
4721 a scratch register and then do that load. */
4722 if (info.equiv_reg_src
4723 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4725 unsigned int regno;
4726 rtx reg;
4728 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4729 if (HARD_REGNO_MODE_OK (regno, Pmode)
4730 && !fixed_regs[regno]
4731 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4732 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
4733 regno)
4734 && !refers_to_regno_p (regno,
4735 regno + hard_regno_nregs[regno]
4736 [Pmode],
4737 info.equiv_reg_src, NULL)
4738 && info.const_equiv[regno] == 0)
4739 break;
4741 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4743 reg = gen_rtx_REG (Pmode, regno);
4744 emit_move_insn (reg, retaddr);
4745 retaddr = reg;
4748 emit_equiv_load (&info);
4749 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4751 /* Show the SET in the above insn is a RETURN. */
4752 jump_set = single_set (jump_insn);
4753 gcc_assert (jump_set);
4754 SET_IS_RETURN_P (jump_set) = 1;
4757 /* If SP is not mentioned in the pattern and its equivalent register, if
4758 any, is not modified, just emit it. Otherwise, if neither is set,
4759 replace the reference to SP and emit the insn. If none of those are
4760 true, handle each SET individually. */
4761 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4762 && (info.sp_equiv_reg == stack_pointer_rtx
4763 || !reg_set_p (info.sp_equiv_reg, insn)))
4764 add_insn (insn);
4765 else if (! reg_set_p (stack_pointer_rtx, insn)
4766 && (info.sp_equiv_reg == stack_pointer_rtx
4767 || !reg_set_p (info.sp_equiv_reg, insn)))
4769 int changed;
4771 changed = validate_replace_rtx (stack_pointer_rtx,
4772 plus_constant (info.sp_equiv_reg,
4773 info.sp_offset),
4774 insn);
4775 gcc_assert (changed);
4777 add_insn (insn);
4779 else if (GET_CODE (PATTERN (insn)) == SET)
4780 handle_epilogue_set (PATTERN (insn), &info);
4781 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4783 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4784 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4785 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4787 else
4788 add_insn (insn);
4790 info.sp_equiv_reg = info.new_sp_equiv_reg;
4791 info.sp_offset = info.new_sp_offset;
4793 /* Now update any constants this insn sets. */
4794 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4795 insn = next;
4798 insns = get_insns ();
4799 end_sequence ();
4800 return insns;
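/* Illustrative example: given an epilogue of the shape

     (set (reg sp) (plus (reg sp) (const_int 16)))
     (return)

   the SP adjustment is only recorded in epi_info (sp == old sp + 16)
   instead of being emitted, and the RETURN is turned into an indirect
   jump through the return address located via that recorded
   equivalence. */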
4803 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4804 structure that contains information about what we've seen so far. We
4805 process this SET by either updating that data or by emitting one or
4806 more insns. */
4808 static void
4809 handle_epilogue_set (rtx set, struct epi_info *p)
4811 /* First handle the case where we are setting SP. Record what it is being
4812 set from. If unknown, abort. */
4813 if (reg_set_p (stack_pointer_rtx, set))
4815 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4817 if (GET_CODE (SET_SRC (set)) == PLUS)
4819 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4820 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4821 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4822 else
4824 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4825 && (REGNO (XEXP (SET_SRC (set), 1))
4826 < FIRST_PSEUDO_REGISTER)
4827 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4828 p->new_sp_offset
4829 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4832 else
4833 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4835 /* If we are adjusting SP, we adjust from the old data. */
4836 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4838 p->new_sp_equiv_reg = p->sp_equiv_reg;
4839 p->new_sp_offset += p->sp_offset;
4842 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4844 return;
4847 /* Next handle the case where we are setting SP's equivalent register.
4848 If we already have a value to set it to, abort. We could update, but
4849 there seems little point in handling that case. Note that we have
4850 to allow for the case where we are setting the register set in
4851 the previous part of a PARALLEL inside a single insn. But use the
4852 old offset for any updates within this insn. We must allow for the case
4853 where the register is being set in a different (usually wider) mode
4854 than Pmode. */
4855 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4857 gcc_assert (!p->equiv_reg_src
4858 && REG_P (p->new_sp_equiv_reg)
4859 && REG_P (SET_DEST (set))
4860 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4861 <= BITS_PER_WORD)
4862 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4863 p->equiv_reg_src
4864 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4865 plus_constant (p->sp_equiv_reg,
4866 p->sp_offset));
4869 /* Otherwise, replace any references to SP in the insn to its new value
4870 and emit the insn. */
4871 else
4873 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4874 plus_constant (p->sp_equiv_reg,
4875 p->sp_offset));
4876 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4877 plus_constant (p->sp_equiv_reg,
4878 p->sp_offset));
4879 emit_insn (set);
4883 /* Update the tracking information for registers set to constants. */
4885 static void
4886 update_epilogue_consts (rtx dest, rtx x, void *data)
4888 struct epi_info *p = (struct epi_info *) data;
4889 rtx new;
4891 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4892 return;
4894 /* If we are either clobbering a register or doing a partial set,
4895 show we don't know the value. */
4896 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
4897 p->const_equiv[REGNO (dest)] = 0;
4899 /* If we are setting it to a constant, record that constant. */
4900 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
4901 p->const_equiv[REGNO (dest)] = SET_SRC (x);
4903 /* If this is a binary operation between a register we have been tracking
4904 and a constant, see if we can compute a new constant value. */
4905 else if (ARITHMETIC_P (SET_SRC (x))
4906 && REG_P (XEXP (SET_SRC (x), 0))
4907 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
4908 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
4909 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4910 && 0 != (new = simplify_binary_operation
4911 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
4912 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
4913 XEXP (SET_SRC (x), 1)))
4914 && GET_CODE (new) == CONST_INT)
4915 p->const_equiv[REGNO (dest)] = new;
4917 /* Otherwise, we can't do anything with this value. */
4918 else
4919 p->const_equiv[REGNO (dest)] = 0;
4922 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
4924 static void
4925 emit_equiv_load (struct epi_info *p)
4927 if (p->equiv_reg_src != 0)
4929 rtx dest = p->sp_equiv_reg;
4931 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
4932 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
4933 REGNO (p->sp_equiv_reg));
4935 emit_move_insn (dest, p->equiv_reg_src);
4936 p->equiv_reg_src = 0;
4939 #endif
4941 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4942 this into place with notes indicating where the prologue ends and where
4943 the epilogue begins. Update the basic block information when possible. */
4945 void
4946 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
4948 int inserted = 0;
4949 edge e;
4950 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4951 rtx seq;
4952 #endif
4953 #ifdef HAVE_prologue
4954 rtx prologue_end = NULL_RTX;
4955 #endif
4956 #if defined (HAVE_epilogue) || defined(HAVE_return)
4957 rtx epilogue_end = NULL_RTX;
4958 #endif
4959 edge_iterator ei;
4961 #ifdef HAVE_prologue
4962 if (HAVE_prologue)
4964 start_sequence ();
4965 seq = gen_prologue ();
4966 emit_insn (seq);
4968 /* Retain a map of the prologue insns. */
4969 record_insns (seq, &prologue);
4970 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
4972 seq = get_insns ();
4973 end_sequence ();
4974 set_insn_locators (seq, prologue_locator);
4976 /* Can't deal with multiple successors of the entry block
4977 at the moment. Function should always have at least one
4978 entry point. */
4979 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4981 insert_insn_on_edge (seq, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
4982 inserted = 1;
4984 #endif
4986 /* If the exit block has no non-fake predecessors, we don't need
4987 an epilogue. */
4988 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4989 if ((e->flags & EDGE_FAKE) == 0)
4990 break;
4991 if (e == NULL)
4992 goto epilogue_done;
4994 #ifdef HAVE_return
4995 if (optimize && HAVE_return)
4997 /* If we're allowed to generate a simple return instruction,
4998 then by definition we don't need a full epilogue. Examine
4999 the block that falls through to EXIT. If it does not
5000 contain any code, examine its predecessors and try to
5001 emit (conditional) return instructions. */
5003 basic_block last;
5004 rtx label;
5006 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5007 if (e->flags & EDGE_FALLTHRU)
5008 break;
5009 if (e == NULL)
5010 goto epilogue_done;
5011 last = e->src;
5013 /* Verify that there are no active instructions in the last block. */
5014 label = BB_END (last);
5015 while (label && !LABEL_P (label))
5017 if (active_insn_p (label))
5018 break;
5019 label = PREV_INSN (label);
5022 if (BB_HEAD (last) == label && LABEL_P (label))
5024 edge_iterator ei2;
5025 rtx epilogue_line_note = NULL_RTX;
5027 /* Locate the line number associated with the closing brace,
5028 if we can find one. */
5029 for (seq = get_last_insn ();
5030 seq && ! active_insn_p (seq);
5031 seq = PREV_INSN (seq))
5032 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5034 epilogue_line_note = seq;
5035 break;
5038 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5040 basic_block bb = e->src;
5041 rtx jump;
5043 if (bb == ENTRY_BLOCK_PTR)
5045 ei_next (&ei2);
5046 continue;
5049 jump = BB_END (bb);
5050 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5052 ei_next (&ei2);
5053 continue;
5056 /* If we have an unconditional jump, we can replace that
5057 with a simple return instruction. */
5058 if (simplejump_p (jump))
5060 emit_return_into_block (bb, epilogue_line_note);
5061 delete_insn (jump);
5064 /* If we have a conditional jump, we can try to replace
5065 that with a conditional return instruction. */
5066 else if (condjump_p (jump))
5068 if (! redirect_jump (jump, 0, 0))
5070 ei_next (&ei2);
5071 continue;
5074 /* If this block has only one successor, it both jumps
5075 and falls through to the fallthru block, so we can't
5076 delete the edge. */
5077 if (EDGE_COUNT (bb->succs) == 1)
5079 ei_next (&ei2);
5080 continue;
5083 else
5085 ei_next (&ei2);
5086 continue;
5089 /* Fix up the CFG for the successful change we just made. */
5090 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5093 /* Emit a return insn for the exit fallthru block. Whether
5094 this is still reachable will be determined later. */
5096 emit_barrier_after (BB_END (last));
5097 emit_return_into_block (last, epilogue_line_note);
5098 epilogue_end = BB_END (last);
5099 EDGE_SUCC (last, 0)->flags &= ~EDGE_FALLTHRU;
5100 goto epilogue_done;
5103 #endif
5104 /* Find the edge that falls through to EXIT. Other edges may exist
5105 due to RETURN instructions, but those don't need epilogues.
5106 There really shouldn't be a mixture -- either all should have
5107 been converted or none, however... */
5109 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5110 if (e->flags & EDGE_FALLTHRU)
5111 break;
5112 if (e == NULL)
5113 goto epilogue_done;
5115 #ifdef HAVE_epilogue
5116 if (HAVE_epilogue)
5118 start_sequence ();
5119 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5121 seq = gen_epilogue ();
5123 #ifdef INCOMING_RETURN_ADDR_RTX
5124 /* If this function returns with the stack depressed and we can support
5125 it, massage the epilogue to actually do that. */
5126 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5127 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5128 seq = keep_stack_depressed (seq);
5129 #endif
5131 emit_jump_insn (seq);
5133 /* Retain a map of the epilogue insns. */
5134 record_insns (seq, &epilogue);
5135 set_insn_locators (seq, epilogue_locator);
5137 seq = get_insns ();
5138 end_sequence ();
5140 insert_insn_on_edge (seq, e);
5141 inserted = 1;
5143 else
5144 #endif
5146 basic_block cur_bb;
5148 if (! next_active_insn (BB_END (e->src)))
5149 goto epilogue_done;
5150 /* We have a fall-through edge to the exit block, the source is not
5151 at the end of the function, and there will be an assembler epilogue
5152 at the end of the function.
5153 We can't use force_nonfallthru here, because that would try to
5154 use return. Inserting a jump 'by hand' is extremely messy, so
5155 we take advantage of cfg_layout_finalize using
5156 fixup_fallthru_exit_predecessor. */
5157 cfg_layout_initialize (0);
5158 FOR_EACH_BB (cur_bb)
5159 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5160 cur_bb->rbi->next = cur_bb->next_bb;
5161 cfg_layout_finalize ();
5163 epilogue_done:
5165 if (inserted)
5166 commit_edge_insertions ();
5168 #ifdef HAVE_sibcall_epilogue
5169 /* Emit sibling epilogues before any sibling call sites. */
5170 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5172 basic_block bb = e->src;
5173 rtx insn = BB_END (bb);
5174 rtx i;
5175 rtx newinsn;
5177 if (!CALL_P (insn)
5178 || ! SIBLING_CALL_P (insn))
5180 ei_next (&ei);
5181 continue;
5184 start_sequence ();
5185 emit_insn (gen_sibcall_epilogue ());
5186 seq = get_insns ();
5187 end_sequence ();
5189 /* Retain a map of the epilogue insns. Used in life analysis to
5190 avoid getting rid of sibcall epilogue insns. Do this before we
5191 actually emit the sequence. */
5192 record_insns (seq, &sibcall_epilogue);
5193 set_insn_locators (seq, epilogue_locator);
5195 i = PREV_INSN (insn);
5196 newinsn = emit_insn_before (seq, insn);
5197 ei_next (&ei);
5199 #endif
5201 #ifdef HAVE_prologue
5202 /* This is probably all useless now that we use locators. */
5203 if (prologue_end)
5205 rtx insn, prev;
5207 /* GDB handles `break f' by setting a breakpoint on the first
5208 line note after the prologue. Which means (1) that if
5209 there are line number notes before where we inserted the
5210 prologue we should move them, and (2) we should generate a
5211 note before the end of the first basic block, if there isn't
5212 one already there.
5214 ??? This behavior is completely broken when dealing with
5215 multiple entry functions. We simply always place the note
5216 into the first basic block and let alternate entry points
5217 be missed. */
5220 for (insn = prologue_end; insn; insn = prev)
5222 prev = PREV_INSN (insn);
5223 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5225 /* Note that we cannot reorder the first insn in the
5226 chain, since rest_of_compilation relies on that
5227 remaining constant. */
5228 if (prev == NULL)
5229 break;
5230 reorder_insns (insn, insn, prologue_end);
5234 /* Find the last line number note in the first block. */
5235 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5236 insn != prologue_end && insn;
5237 insn = PREV_INSN (insn))
5238 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5239 break;
5241 /* If we didn't find one, make a copy of the first line number
5242 we run across. */
5243 if (! insn)
5245 for (insn = next_active_insn (prologue_end);
5246 insn;
5247 insn = PREV_INSN (insn))
5248 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5250 emit_note_copy_after (insn, prologue_end);
5251 break;
5255 #endif
5256 #ifdef HAVE_epilogue
5257 if (epilogue_end)
5259 rtx insn, next;
5261 /* Similarly, move any line notes that appear after the epilogue.
5262 There is no need, however, to be quite so anal about the existence
5263 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5264 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5265 info generation. */
5266 for (insn = epilogue_end; insn; insn = next)
5268 next = NEXT_INSN (insn);
5269 if (NOTE_P (insn)
5270 && (NOTE_LINE_NUMBER (insn) > 0
5271 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5272 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5273 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5276 #endif
5279 /* Reposition the prologue-end and epilogue-begin notes after instruction
5280 scheduling and delayed branch scheduling. */
5282 void
5283 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5285 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5286 rtx insn, last, note;
5287 int len;
5289 if ((len = VARRAY_SIZE (prologue)) > 0)
5291 last = 0, note = 0;
5293 /* Scan from the beginning until we reach the last prologue insn.
5294 We apparently can't depend on basic_block_{head,end} after
5295 reorg has run. */
5296 for (insn = f; insn; insn = NEXT_INSN (insn))
5298 if (NOTE_P (insn))
5300 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5301 note = insn;
5303 else if (contains (insn, prologue))
5305 last = insn;
5306 if (--len == 0)
5307 break;
5311 if (last)
5313 /* Find the prologue-end note if we haven't already, and
5314 move it to just after the last prologue insn. */
5315 if (note == 0)
5317 for (note = last; (note = NEXT_INSN (note));)
5318 if (NOTE_P (note)
5319 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5320 break;
5323 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5324 if (LABEL_P (last))
5325 last = NEXT_INSN (last);
5326 reorder_insns (note, note, last);
5330 if ((len = VARRAY_SIZE (epilogue)) > 0)
5332 last = 0, note = 0;
5334 /* Scan from the end until we reach the first epilogue insn.
5335 We apparently can't depend on basic_block_{head,end} after
5336 reorg has run. */
5337 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5339 if (NOTE_P (insn))
5341 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5342 note = insn;
5344 else if (contains (insn, epilogue))
5346 last = insn;
5347 if (--len == 0)
5348 break;
5352 if (last)
5354 /* Find the epilogue-begin note if we haven't already, and
5355 move it to just before the first epilogue insn. */
5356 if (note == 0)
5358 for (note = insn; (note = PREV_INSN (note));)
5359 if (NOTE_P (note)
5360 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5361 break;
5364 if (PREV_INSN (last) != note)
5365 reorder_insns (note, note, PREV_INSN (last));
5368 #endif /* HAVE_prologue or HAVE_epilogue */
5371 /* Called once, at initialization, to initialize function.c. */
5373 void
5374 init_function_once (void)
5376 VARRAY_INT_INIT (prologue, 0, "prologue");
5377 VARRAY_INT_INIT (epilogue, 0, "epilogue");
5378 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
5381 /* Resets the cfun->ib_boundaries_block array. */
5383 void
5384 reset_block_changes (void)
5386 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5387 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5390 /* Record the boundary for BLOCK. */
5391 void
5392 record_block_change (tree block)
5394 int i, n;
5395 tree last_block;
5397 if (!block)
5398 return;
5400 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5401 VARRAY_POP (cfun->ib_boundaries_block);
5402 n = get_max_uid ();
5403 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5404 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5406 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5409 /* Finish recording the block boundaries. */
5410 void finalize_block_changes (void)
5412 record_block_change (DECL_INITIAL (current_function_decl));
5415 /* For INSN, set *BLOCK to the BLOCK it belongs to. */
5416 void
5417 check_block_change (rtx insn, tree *block)
5419 unsigned uid = INSN_UID (insn);
5421 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5422 return;
5424 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
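/* Usage sketch (illustrative): a pass walking the insn stream can keep
   track of the innermost lexical BLOCK like so:

     tree block = DECL_INITIAL (current_function_decl);
     rtx insn;

     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       check_block_change (insn, &block);
*/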
5427 /* Releases the ib_boundaries_block records. */
5428 void
5429 free_block_changes (void)
5431 cfun->ib_boundaries_block = NULL;
5434 /* Returns the name of the current function. */
5435 const char *
5436 current_function_name (void)
5438 return lang_hooks.decl_printable_name (cfun->decl, 2);
5441 #include "gt-function.h"