/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than VALUE.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
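
/* A worked example (illustrative numbers, not used elsewhere in this
   file): with ALIGN == 8, CEIL_ROUND (13, 8) is 16, FLOOR_ROUND (13, 8)
   is 8, and FLOOR_ROUND (-13, 8) is -16.  The masking trick therefore
   rounds correctly even for the negative frame offsets that arise when
   the frame grows downward.  */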
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
                                 struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
static void instantiate_decls (tree, int);
static void instantiate_decls_1 (tree, int);
static void instantiate_decl (rtx, HOST_WIDE_INT, int);
static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
static int instantiate_virtual_regs_1 (rtx *, rtx, int);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, varray_type *);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
static int contains (rtx, varray_type);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
static void purge_single_hard_subreg_set (rtx);
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void instantiate_virtual_regs_lossage (rtx);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context)
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
        cfun->contains_functions = 1;
      else
        {
          struct function *containing = find_function_data (context);
          containing->contains_functions = 1;
        }
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  cfun = 0;
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  cfun = p;
  outer_function_chain = p->outer;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
  f->stmt = NULL;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_avail_temp_slots = NULL;
  f->x_used_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_return_label = NULL;
  f->x_naked_return_label = NULL;
  f->x_stack_slot_list = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_parm_birth_insn = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size (void)
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
                      struct function *function)
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;
      else
        alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment we can't do with expected alignment of the boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
         division with a negative dividend isn't as well defined as we might
         like.  So we instead assume that ALIGNMENT is a power of two and
         use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
      function->x_frame_offset
        = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
           + frame_phase);
#else
      function->x_frame_offset
        = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
           + frame_phase);
#endif
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (function->x_frame_offset + bigend_correction,
                           Pmode));

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
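
/* An illustrative sketch of a typical call, not code from this file:
   asking for a slot in a specific mode, with ALIGN == 0 so the mode
   chooses the alignment:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
     emit_move_insn (slot, gen_int_mode (42, SImode));

   Until instantiate_virtual_regs runs, SLOT's address is expressed in
   terms of virtual_stack_vars_rtx; afterwards, newly allocated slots
   are addressed directly off frame_pointer_rtx.  */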
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  level++;

  if (!used_temp_slots)
    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");

  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);

  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = avail_temp_slots; p; p = p->next)
    {
      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
          && objects_must_conflict_p (p->type, type)
          && (best_p == 0 || best_p->size > p->size
              || (best_p->size == p->size && best_p->align > p->align)))
        {
          if (p->align == align && p->size == size)
            {
              selected = p;
              cut_slot_from_list (selected, &avail_temp_slots);
              best_p = 0;
              break;
            }
          best_p = p;
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 1;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
                                 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
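
/* An illustrative sketch of the usual temporary-slot lifecycle, not
   code from this file.  With KEEP == 0 the slot is recycled at the end
   of the statement:

     push_temp_slots ();
     tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     ... emit insns that store into and read from TMP ...
     free_temp_slots ();
     pop_temp_slots ();

   A later request with the same mode, size, and alignment may be handed
   back the very same frame slot from avail_temp_slots.  */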
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      tree size_tree;
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
        size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* If we still haven't been able to get a size, see if the language
         can compute a maximum size.  */
      if (size == -1
          && (size_tree = lang_hooks.types.max_size (type)) != 0
          && host_integerp (size_tree, 1))
        size = tree_low_cst (size_tree, 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("%Jsize of variable '%D' is too large", decl, decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
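
/* An illustrative contrast, not code from this file: assign_temp
   prefers a pseudo register and falls back to stack memory only when
   it must:

     r = assign_temp (integer_type_node, 0, 0, 0);  - a REG, in the
                                                      promoted mode
     m = assign_temp (integer_type_node, 0, 1, 0);  - a MEM, because
                                                      MEMORY_REQUIRED is 1

   BLKmode types always take the memory path regardless of
   MEMORY_REQUIRED.  */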
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
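
/* A worked example with illustrative offsets: if freed BLKmode slots P
   and Q have P->base_offset == 0, P->full_size == 16 and
   Q->base_offset == 16, Q->full_size == 8, then P and Q are adjacent
   (0 + 16 == 16), Q is merged into P, and the single remaining slot
   covers 24 bytes, ready to satisfy a larger request later.  */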
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
        if (XEXP (p->slot, 0) == x
            || p->address == x
            || (GET_CODE (x) == PLUS
                && XEXP (x, 0) == virtual_stack_vars_rtx
                && GET_CODE (XEXP (x, 1)) == CONST_INT
                && INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
          return p;

        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
          for (next = p->address; next; next = XEXP (next, 1))
            if (XEXP (next, 0) == x)
              return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
        return;

      if (REG_P (new))
        {
          update_temp_slot_address (XEXP (old, 0), new);
          update_temp_slot_address (XEXP (old, 1), new);
          return;
        }
      else if (GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise, add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}
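
/* An illustrative sketch of the nesting discipline, not code from this
   file.  A slot made at the inner level dies at the matching pop unless
   it is preserved first:

     push_temp_slots ();           - now at level N+1
     t = assign_stack_temp (Pmode, GET_MODE_SIZE (Pmode), 0);
     preserve_temp_slots (t);      - T moves out to level N
     pop_temp_slots ();            - frees whatever is left at N+1

   This is how a ({...}) grouping keeps its result alive: the temporary
   is pretended to belong to the enclosing level, as described at the
   top of this file.  */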
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
 + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
/* On most machines, the CFA coincides with the first incoming parm.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
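
/* To make the offsets concrete, assume (illustrative numbers only) a
   downward-growing frame with STARTING_FRAME_OFFSET == -16 and
   FIRST_PARM_OFFSET == 8.  Instantiation then rewrites

     (plus (reg virtual-stack-vars) (const_int 4))
       into  (plus (reg fp) (const_int -12))
     (plus (reg virtual-incoming-args) (const_int 0))
       into  (plus (reg ap) (const_int 8))

   i.e. each virtual register is replaced by its hard register plus the
   matching offset computed in instantiate_virtual_regs, below.  */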
/* Convert a SET of a hard subreg to a set of the appropriate hard
   register.  A subroutine of purge_hard_subreg_sets.  */

static void
purge_single_hard_subreg_set (rtx pattern)
{
  rtx reg = SET_DEST (pattern);
  enum machine_mode mode = GET_MODE (SET_DEST (pattern));
  int offset = 0;

  if (GET_CODE (reg) == SUBREG && REG_P (SUBREG_REG (reg))
      && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
    {
      offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
                                    GET_MODE (SUBREG_REG (reg)),
                                    SUBREG_BYTE (reg),
                                    GET_MODE (reg));
      reg = SUBREG_REG (reg);
    }

  if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      reg = gen_rtx_REG (mode, REGNO (reg) + offset);
      SET_DEST (pattern) = reg;
    }
}

/* Eliminate all occurrences of SETs of hard subregs from INSNS.  The
   only such SETs that we expect to see are those left in because
   integrate can't handle sets of parts of a return value register.

   We don't use alter_subreg because we only want to eliminate subregs
   of hard registers.  */

void
purge_hard_subreg_sets (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          rtx pattern = PATTERN (insn);
          switch (GET_CODE (pattern))
            {
            case SET:
              if (GET_CODE (SET_DEST (pattern)) == SUBREG)
                purge_single_hard_subreg_set (pattern);
              break;
            case PARALLEL:
              {
                int j;
                for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
                  {
                    rtx inner_pattern = XVECEXP (pattern, 0, j);
                    if (GET_CODE (inner_pattern) == SET
                        && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
                      purge_single_hard_subreg_set (inner_pattern);
                  }
              }
              break;
            default:
              break;
            }
        }
    }
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (current_function_decl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      {
        instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
        if (INSN_DELETED_P (insn))
          continue;
        instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (GET_CODE (insn) == CALL_INSN)
          instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
                                      NULL_RTX, 0);

        /* Past this point all ASM statements should match.  Verify that
           to avoid failures later in the compilation process.  */
        if (asm_noperands (PATTERN (insn)) >= 0
            && ! check_asm_operands (PATTERN (insn)))
          instantiate_virtual_regs_lossage (insn);
      }

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (current_function_decl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
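
/* An illustrative before/after, not taken from a real dump: an insn
   emitted during expansion as

     (set (mem:SI (plus (reg virtual-stack-vars) (const_int 8))) (reg:SI 60))

   leaves this pass as

     (set (mem:SI (plus (reg fp) (const_int N))) (reg:SI 60))

   where N is 8 plus var_offset, with helper insns emitted in front of
   the insn whenever the combined address is not directly valid.  */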
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (tree fndecl, int valid_only)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
      HOST_WIDE_INT size_rtl;

      instantiate_decl (DECL_RTL (decl), size, valid_only);

      /* If the parameter was promoted, then the incoming RTL mode may be
         larger than the declared type size.  We must use the larger of
         the two sizes.  */
      size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
      size = MAX (size_rtl, size);
      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let, int valid_only)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      instantiate_decl (DECL_RTL (t),
                        int_size_in_bytes (TREE_TYPE (t)),
                        valid_only);

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    instantiate_decls_1 (t, valid_only);
}
/* Subroutine of the preceding procedures: Given RTL representing a
   decl and the size of the object, do any instantiation required.

   If VALID_ONLY is nonzero, it means that the RTL should only be
   changed if the new address is valid.  */

static void
instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
{
  enum machine_mode mode;
  rtx addr;

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */

  if (x == 0 || !MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  /* If we should only do this if the address is valid, copy the address.
     We need to do this so we can undo any changes that might make the
     address invalid.  This copy is unfortunate, but probably can't be
     avoided.  */

  if (valid_only)
    addr = copy_rtx (addr);

  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);

  if (valid_only && size >= 0)
    {
      unsigned HOST_WIDE_INT decl_size = size;

      /* Now verify that the resulting address is valid for every integer or
         floating-point mode up to and including SIZE bytes long.  We do this
         since the object might be accessed in any mode and frame addresses
         are shared.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
           mode = GET_MODE_WIDER_MODE (mode))
        if (! memory_address_p (mode, addr))
          return;
    }

  /* Put back the address now that we have updated it and we either know
     it is valid or we don't care whether it is valid.  */

  XEXP (x, 0) = addr;
}
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    new = arg_pointer_rtx, offset = cfa_offset;
  else
    return 0;

  *poffset = offset;
  return new;
}
/* Called when instantiate_virtual_regs has failed to update the instruction.
   Usually this means that a non-matching instruction has been emitted;
   for asm statements, however, the problem may lie in the constraints.  */

static void
instantiate_virtual_regs_lossage (rtx insn)
{
  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      error_for_asm (insn, "impossible constraint in `asm'");
      delete_insn (insn);
    }
  else
    abort ();
}
1544 /* Given a pointer to a piece of rtx and an optional pointer to the
1545 containing object, instantiate any virtual registers present in it.
1547 If EXTRA_INSNS, we always do the replacement and generate
1548 any extra insns before OBJECT. If it zero, we do nothing if replacement
1549 is not valid.
1551 Return 1 if we either had nothing to do or if we were able to do the
1552 needed replacement. Return 0 otherwise; we only return zero if
1553 EXTRA_INSNS is zero.
1555 We first try some simple transformations to avoid the creation of extra
1556 pseudos. */
1558 static int
1559 instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
1561 rtx x;
1562 RTX_CODE code;
1563 rtx new = 0;
1564 HOST_WIDE_INT offset = 0;
1565 rtx temp;
1566 rtx seq;
1567 int i, j;
1568 const char *fmt;
1570 /* Re-start here to avoid recursion in common cases. */
1571 restart:
1573 x = *loc;
1574 if (x == 0)
1575 return 1;
1577 /* We may have detected and deleted invalid asm statements. */
1578 if (object && INSN_P (object) && INSN_DELETED_P (object))
1579 return 1;
1581 code = GET_CODE (x);
1583 /* Check for some special cases. */
1584 switch (code)
1586 case CONST_INT:
1587 case CONST_DOUBLE:
1588 case CONST_VECTOR:
1589 case CONST:
1590 case SYMBOL_REF:
1591 case CODE_LABEL:
1592 case PC:
1593 case CC0:
1594 case ASM_INPUT:
1595 case ADDR_VEC:
1596 case ADDR_DIFF_VEC:
1597 case RETURN:
1598 return 1;
1600 case SET:
1601 /* We are allowed to set the virtual registers. This means that
1602 the actual register should receive the source minus the
1603 appropriate offset. This is used, for example, in the handling
1604 of non-local gotos. */
1605 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
1607 rtx src = SET_SRC (x);
1609 /* We are setting the register, not using it, so the relevant
1610 offset is the negative of the offset to use were we using
1611 the register. */
1612 offset = - offset;
1613 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
1615 /* The only valid sources here are PLUS or REG. Just do
1616 the simplest possible thing to handle them. */
1617 if (!REG_P (src) && GET_CODE (src) != PLUS)
1619 instantiate_virtual_regs_lossage (object);
1620 return 1;
1623 start_sequence ();
1624 if (!REG_P (src))
1625 temp = force_operand (src, NULL_RTX);
1626 else
1627 temp = src;
1628 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
1629 seq = get_insns ();
1630 end_sequence ();
1632 emit_insn_before (seq, object);
1633 SET_DEST (x) = new;
1635 if (! validate_change (object, &SET_SRC (x), temp, 0)
1636 || ! extra_insns)
1637 instantiate_virtual_regs_lossage (object);
1639 return 1;
1642 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
1643 loc = &SET_SRC (x);
1644 goto restart;
1646 case PLUS:
1647 /* Handle special case of virtual register plus constant. */
1648 if (CONSTANT_P (XEXP (x, 1)))
1650 rtx old, new_offset;
1652 /* Check for (plus (plus VIRT foo) (const_int)) first. */
1653 if (GET_CODE (XEXP (x, 0)) == PLUS)
1655 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
1657 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
1658 extra_insns);
1659 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
1661 else
1663 loc = &XEXP (x, 0);
1664 goto restart;
1668 #ifdef POINTERS_EXTEND_UNSIGNED
1669 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1670 we can commute the PLUS and SUBREG because pointers into the
1671 frame are well-behaved. */
1672 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
1673 && GET_CODE (XEXP (x, 1)) == CONST_INT
1674 && 0 != (new
1675 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
1676 &offset))
1677 && validate_change (object, loc,
1678 plus_constant (gen_lowpart (ptr_mode,
1679 new),
1680 offset
1681 + INTVAL (XEXP (x, 1))),
1683 return 1;
1684 #endif
1685 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
1687 /* We know the second operand is a constant. Unless the
1688 first operand is a REG (which has been already checked),
1689 it needs to be checked. */
1690 if (!REG_P (XEXP (x, 0)))
1692 loc = &XEXP (x, 0);
1693 goto restart;
1695 return 1;
1698 new_offset = plus_constant (XEXP (x, 1), offset);
1700 /* If the new constant is zero, try to replace the sum with just
1701 the register. */
1702 if (new_offset == const0_rtx
1703 && validate_change (object, loc, new, 0))
1704 return 1;
1706 /* Next try to replace the register and new offset.
1707 There are two changes to validate here and we can't assume that
1708 in the case of old offset equals new just changing the register
1709 will yield a valid insn. In the interests of a little efficiency,
1710 however, we only call validate change once (we don't queue up the
1711 changes and then call apply_change_group). */
1713 old = XEXP (x, 0);
1714 if (offset == 0
1715 ? ! validate_change (object, &XEXP (x, 0), new, 0)
1716 : (XEXP (x, 0) = new,
1717 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
1719 if (! extra_insns)
1721 XEXP (x, 0) = old;
1722 return 0;
1725 /* Otherwise copy the new constant into a register and replace
1726 constant with that register. */
1727 temp = gen_reg_rtx (Pmode);
1728 XEXP (x, 0) = new;
1729 if (validate_change (object, &XEXP (x, 1), temp, 0))
1730 emit_insn_before (gen_move_insn (temp, new_offset), object);
1731 else
1733 /* If that didn't work, replace this expression with a
1734 register containing the sum. */
1736 XEXP (x, 0) = old;
1737 new = gen_rtx_PLUS (Pmode, new, new_offset);
1739 start_sequence ();
1740 temp = force_operand (new, NULL_RTX);
1741 seq = get_insns ();
1742 end_sequence ();
1744 emit_insn_before (seq, object);
1745 if (! validate_change (object, loc, temp, 0)
1746 && ! validate_replace_rtx (x, temp, object))
1748 instantiate_virtual_regs_lossage (object);
1749 return 1;
1754 return 1;
1757 /* Fall through to generic two-operand expression case. */
1758 case EXPR_LIST:
1759 case CALL:
1760 case COMPARE:
1761 case MINUS:
1762 case MULT:
1763 case DIV: case UDIV:
1764 case MOD: case UMOD:
1765 case AND: case IOR: case XOR:
1766 case ROTATERT: case ROTATE:
1767 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
1768 case NE: case EQ:
1769 case GE: case GT: case GEU: case GTU:
1770 case LE: case LT: case LEU: case LTU:
1771 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
1772 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
1773 loc = &XEXP (x, 0);
1774 goto restart;
1776 case MEM:
1777 /* Most cases of MEM that convert to valid addresses have already been
1778 handled by our scan of decls. The only special handling we
1779 need here is to make a copy of the rtx to ensure it isn't being
1780 shared if we have to change it to a pseudo.
1782 If the rtx is a simple reference to an address via a virtual register,
1783 it can potentially be shared. In such cases, first try to make it
1784 a valid address, which can also be shared. Otherwise, copy it and
1785 proceed normally.
1787 First check for common cases that need no processing. These are
1788 usually due to instantiation already being done on a previous instance
1789 of a shared rtx. */
1791 temp = XEXP (x, 0);
1792 if (CONSTANT_ADDRESS_P (temp)
1793 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1794 || temp == arg_pointer_rtx
1795 #endif
1796 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1797 || temp == hard_frame_pointer_rtx
1798 #endif
1799 || temp == frame_pointer_rtx)
1800 return 1;
1802 if (GET_CODE (temp) == PLUS
1803 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1804 && (XEXP (temp, 0) == frame_pointer_rtx
1805 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1806 || XEXP (temp, 0) == hard_frame_pointer_rtx
1807 #endif
1808 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1809 || XEXP (temp, 0) == arg_pointer_rtx
1810 #endif
1812 return 1;
1814 if (temp == virtual_stack_vars_rtx
1815 || temp == virtual_incoming_args_rtx
1816 || (GET_CODE (temp) == PLUS
1817 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1818 && (XEXP (temp, 0) == virtual_stack_vars_rtx
1819 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
1821 /* This MEM may be shared. If the substitution can be done without
1822 the need to generate new pseudos, we want to do it in place
1823 so all copies of the shared rtx benefit. The call below will
1824 only make substitutions if the resulting address is still
1825 valid.
1827 Note that we cannot pass X as the object in the recursive call
1828 since the insn being processed may not allow all valid
1829 addresses. However, if we were not passed on object, we can
1830 only modify X without copying it if X will have a valid
1831 address.
1833 ??? Also note that this can still lose if OBJECT is an insn that
1834 has less restrictions on an address that some other insn.
1835 In that case, we will modify the shared address. This case
1836 doesn't seem very likely, though. One case where this could
1837 happen is in the case of a USE or CLOBBER reference, but we
1838 take care of that below. */
1840 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
1841 object ? object : x, 0))
1842 return 1;
1844 /* Otherwise make a copy and process that copy. We copy the entire
1845 RTL expression since it might be a PLUS which could also be
1846 shared. */
1847 *loc = x = copy_rtx (x);
1850 /* Fall through to generic unary operation case. */
1851 case PREFETCH:
1852 case SUBREG:
1853 case STRICT_LOW_PART:
1854 case NEG: case NOT:
1855 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
1856 case SIGN_EXTEND: case ZERO_EXTEND:
1857 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
1858 case FLOAT: case FIX:
1859 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
1860 case ABS:
1861 case SQRT:
1862 case FFS:
1863 case CLZ: case CTZ:
1864 case POPCOUNT: case PARITY:
1865 /* These cases either have just one operand or we know that we need not
1866 check the rest of the operands. */
1867 loc = &XEXP (x, 0);
1868 goto restart;
1870 case USE:
1871 case CLOBBER:
1872 /* If the operand is a MEM, see if the change is a valid MEM. If not,
1873 go ahead and make the invalid change anyway, but do it to a copy. For a REG,
1874 just make the recursive call, since there's no chance of a problem. */
1876 if ((MEM_P (XEXP (x, 0))
1877 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
1878 0))
1879 || (REG_P (XEXP (x, 0))
1880 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
1881 return 1;
1883 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
1884 loc = &XEXP (x, 0);
1885 goto restart;
1887 case REG:
1888 /* Try to replace with a PLUS. If that doesn't work, compute the sum
1889 in front of this insn and substitute the temporary. */
1890 if ((new = instantiate_new_reg (x, &offset)) != 0)
1891 {
1892 temp = plus_constant (new, offset);
1893 if (!validate_change (object, loc, temp, 0))
1894 {
1895 if (! extra_insns)
1896 return 0;
1898 start_sequence ();
1899 temp = force_operand (temp, NULL_RTX);
1900 seq = get_insns ();
1901 end_sequence ();
1903 emit_insn_before (seq, object);
1904 if (! validate_change (object, loc, temp, 0)
1905 && ! validate_replace_rtx (x, temp, object))
1906 instantiate_virtual_regs_lossage (object);
1907 }
1908 }
1910 return 1;
1912 default:
1913 break;
1916 /* Scan all subexpressions. */
1917 fmt = GET_RTX_FORMAT (code);
1918 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1919 if (*fmt == 'e')
1921 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
1922 return 0;
1924 else if (*fmt == 'E')
1925 for (j = 0; j < XVECLEN (x, i); j++)
1926 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
1927 extra_insns))
1928 return 0;
1930 return 1;
1931 }
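/* As an illustration of the instantiation done above: on a hypothetical
   target where STARTING_FRAME_OFFSET is -16, a local addressed before
   instantiation as

     (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 8)))

   becomes, with var_offset folded in by plus_constant,

     (mem:SI (plus:SI (reg fp) (const_int -8)))

   If the combined address is not valid for the insn at hand, the sum
   is instead computed into a fresh pseudo in front of the insn, as in
   the REG case above.  */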
1933 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1934 This means a type for which function calls must pass an address to the
1935 function or get an address back from the function.
1936 EXP may be a type node or an expression (whose type is tested). */
1938 int
1939 aggregate_value_p (tree exp, tree fntype)
1940 {
1941 int i, regno, nregs;
1942 rtx reg;
1944 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1946 if (fntype)
1947 switch (TREE_CODE (fntype))
1948 {
1949 case CALL_EXPR:
1950 fntype = get_callee_fndecl (fntype);
1951 fntype = fntype ? TREE_TYPE (fntype) : 0;
1952 break;
1953 case FUNCTION_DECL:
1954 fntype = TREE_TYPE (fntype);
1955 break;
1956 case FUNCTION_TYPE:
1957 case METHOD_TYPE:
1958 break;
1959 case IDENTIFIER_NODE:
1960 fntype = 0;
1961 break;
1962 default:
1963 /* We don't expect other tree codes here. */
1964 abort ();
1965 }
1967 if (TREE_CODE (type) == VOID_TYPE)
1968 return 0;
1969 if (targetm.calls.return_in_memory (type, fntype))
1970 return 1;
1971 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1972 and thus can't be returned in registers. */
1973 if (TREE_ADDRESSABLE (type))
1974 return 1;
1975 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1976 return 1;
1977 /* Make sure we have suitable call-clobbered regs to return
1978 the value in; if not, we must return it in memory. */
1979 reg = hard_function_value (type, 0, 0);
1981 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1982 it is OK. */
1983 if (!REG_P (reg))
1984 return 0;
1986 regno = REGNO (reg);
1987 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1988 for (i = 0; i < nregs; i++)
1989 if (! call_used_regs[regno + i])
1990 return 1;
1991 return 0;
1992 }
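/* Illustrative cases for the predicate above: a type the target's
   return_in_memory hook rejects (say, a 32-byte struct on a target
   that returns at most two words in registers) yields 1, as does any
   TREE_ADDRESSABLE type such as a C++ class with a nontrivial copy
   constructor.  A plain int, or a small struct that fits in
   call-clobbered return registers, yields 0.  The exact cutoffs are
   the target's decision, not this function's.  */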
1994 /* Return true if we should assign DECL a pseudo register; false if it
1995 should live on the local stack. */
1997 bool
1998 use_register_for_decl (tree decl)
1999 {
2000 /* Honor volatile. */
2001 if (TREE_SIDE_EFFECTS (decl))
2002 return false;
2004 /* Honor addressability. */
2005 if (TREE_ADDRESSABLE (decl))
2006 return false;
2008 /* Only register-like things go in registers. */
2009 if (DECL_MODE (decl) == BLKmode)
2010 return false;
2012 /* If -ffloat-store specified, don't put explicit float variables
2013 into registers. */
2014 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2015 propagates values across these stores, and it probably shouldn't. */
2016 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2017 return false;
2019 /* Compiler-generated temporaries can always go in registers. */
2020 if (DECL_ARTIFICIAL (decl))
2021 return true;
2023 #ifdef NON_SAVING_SETJMP
2024 /* Protect variables not declared "register" from setjmp. */
2025 if (NON_SAVING_SETJMP
2026 && current_function_calls_setjmp
2027 && !DECL_REGISTER (decl))
2028 return false;
2029 #endif
2031 return (optimize || DECL_REGISTER (decl));
2032 }
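/* For example, given

     volatile int v;  -- TREE_SIDE_EFFECTS, so it stays in memory
     int i; ... &i;   -- TREE_ADDRESSABLE, so it stays in memory
     struct big b;    -- BLKmode, so it stays in memory
     int n;           -- gets a pseudo register when optimizing

   and at -O0 only DECL_ARTIFICIAL temporaries and variables declared
   "register" are given pseudos.  */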
2034 /* Structures to communicate between the subroutines of assign_parms.
2035 The first holds data persistent across all parameters, the second
2036 is cleared out for each parameter. */
2038 struct assign_parm_data_all
2039 {
2040 CUMULATIVE_ARGS args_so_far;
2041 struct args_size stack_args_size;
2042 tree function_result_decl;
2043 tree orig_fnargs;
2044 rtx conversion_insns;
2045 HOST_WIDE_INT pretend_args_size;
2046 HOST_WIDE_INT extra_pretend_bytes;
2047 int reg_parm_stack_space;
2048 };
2050 struct assign_parm_data_one
2051 {
2052 tree nominal_type;
2053 tree passed_type;
2054 rtx entry_parm;
2055 rtx stack_parm;
2056 enum machine_mode nominal_mode;
2057 enum machine_mode passed_mode;
2058 enum machine_mode promoted_mode;
2059 struct locate_and_pad_arg_data locate;
2060 int partial;
2061 BOOL_BITFIELD named_arg : 1;
2062 BOOL_BITFIELD last_named : 1;
2063 BOOL_BITFIELD passed_pointer : 1;
2064 BOOL_BITFIELD on_stack : 1;
2065 BOOL_BITFIELD loaded_in_reg : 1;
2066 };
2068 /* A subroutine of assign_parms. Initialize ALL. */
2070 static void
2071 assign_parms_initialize_all (struct assign_parm_data_all *all)
2072 {
2073 tree fntype;
2075 memset (all, 0, sizeof (*all));
2077 fntype = TREE_TYPE (current_function_decl);
2079 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2080 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2081 #else
2082 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2083 current_function_decl, -1);
2084 #endif
2086 #ifdef REG_PARM_STACK_SPACE
2087 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2088 #endif
2089 }
2091 /* If ARGS contains entries with complex types, split the entry into two
2092 entries of the component type. Return a new list if substitutions are
2093 needed, else the old list. */
2095 static tree
2096 split_complex_args (tree args)
2097 {
2098 tree p;
2100 /* Before allocating memory, check for the common case of no complex. */
2101 for (p = args; p; p = TREE_CHAIN (p))
2102 {
2103 tree type = TREE_TYPE (p);
2104 if (TREE_CODE (type) == COMPLEX_TYPE
2105 && targetm.calls.split_complex_arg (type))
2106 goto found;
2107 }
2108 return args;
2110 found:
2111 args = copy_list (args);
2113 for (p = args; p; p = TREE_CHAIN (p))
2114 {
2115 tree type = TREE_TYPE (p);
2116 if (TREE_CODE (type) == COMPLEX_TYPE
2117 && targetm.calls.split_complex_arg (type))
2118 {
2119 tree decl;
2120 tree subtype = TREE_TYPE (type);
2122 /* Rewrite the PARM_DECL's type with its component. */
2123 TREE_TYPE (p) = subtype;
2124 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2125 DECL_MODE (p) = VOIDmode;
2126 DECL_SIZE (p) = NULL;
2127 DECL_SIZE_UNIT (p) = NULL;
2128 layout_decl (p, 0);
2130 /* Build a second synthetic decl. */
2131 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2132 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2133 layout_decl (decl, 0);
2135 /* Splice it in; skip the new decl. */
2136 TREE_CHAIN (decl) = TREE_CHAIN (p);
2137 TREE_CHAIN (p) = decl;
2138 p = decl;
2139 }
2140 }
2142 return args;
2143 }
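/* For example, on a target whose split_complex_arg hook accepts
   COMPLEX_TYPE, a parameter list for

     void f (_Complex double z, int i);

   is rewritten so the PARM_DECL chain reads as if the source had been

     void f (double z_real, double z_imag, int i);

   with the second double being the synthetic decl built above.  The
   pieces are glued back together in assign_parms_unsplit_complex.  */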
2145 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2146 the hidden struct return argument, and (abi willing) complex args.
2147 Return the new parameter list. */
2149 static tree
2150 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2151 {
2152 tree fndecl = current_function_decl;
2153 tree fntype = TREE_TYPE (fndecl);
2154 tree fnargs = DECL_ARGUMENTS (fndecl);
2156 /* If struct value address is treated as the first argument, make it so. */
2157 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2158 && ! current_function_returns_pcc_struct
2159 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2160 {
2161 tree type = build_pointer_type (TREE_TYPE (fntype));
2162 tree decl;
2164 decl = build_decl (PARM_DECL, NULL_TREE, type);
2165 DECL_ARG_TYPE (decl) = type;
2166 DECL_ARTIFICIAL (decl) = 1;
2168 TREE_CHAIN (decl) = fnargs;
2169 fnargs = decl;
2170 all->function_result_decl = decl;
2171 }
2173 all->orig_fnargs = fnargs;
2175 /* If the target wants to split complex arguments into scalars, do so. */
2176 if (targetm.calls.split_complex_arg)
2177 fnargs = split_complex_args (fnargs);
2179 return fnargs;
2180 }
2182 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2183 data for the parameter. Incorporate ABI specifics such as pass-by-
2184 reference and type promotion. */
2186 static void
2187 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2188 struct assign_parm_data_one *data)
2189 {
2190 tree nominal_type, passed_type;
2191 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2193 memset (data, 0, sizeof (*data));
2195 /* Set LAST_NAMED if this is the last named arg before any anonymous args. */
2196 if (current_function_stdarg)
2197 {
2198 tree tem;
2199 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
2200 if (DECL_NAME (tem))
2201 break;
2202 if (tem == 0)
2203 data->last_named = true;
2204 }
2206 /* Set NAMED_ARG if this arg should be treated as a named arg. For
2207 most machines, if this is a varargs/stdarg function, then we treat
2208 the last named arg as if it were anonymous too. */
2209 if (targetm.calls.strict_argument_naming (&all->args_so_far))
2210 data->named_arg = 1;
2211 else
2212 data->named_arg = !data->last_named;
2214 nominal_type = TREE_TYPE (parm);
2215 passed_type = DECL_ARG_TYPE (parm);
2217 /* Look out for errors propagating this far. Also, if the parameter's
2218 type is void then its value doesn't matter. */
2219 if (TREE_TYPE (parm) == error_mark_node
2220 /* This can happen after weird syntax errors
2221 or if an enum type is defined among the parms. */
2222 || TREE_CODE (parm) != PARM_DECL
2223 || passed_type == NULL
2224 || VOID_TYPE_P (nominal_type))
2225 {
2226 nominal_type = passed_type = void_type_node;
2227 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2228 goto egress;
2229 }
2231 /* Find mode of arg as it is passed, and mode of arg as it should be
2232 during execution of this function. */
2233 passed_mode = TYPE_MODE (passed_type);
2234 nominal_mode = TYPE_MODE (nominal_type);
2236 /* If the parm is to be passed as a transparent union, use the type of
2237 the first field for the tests below. We have already verified that
2238 the modes are the same. */
2239 if (DECL_TRANSPARENT_UNION (parm)
2240 || (TREE_CODE (passed_type) == UNION_TYPE
2241 && TYPE_TRANSPARENT_UNION (passed_type)))
2242 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2244 /* See if this arg was passed by invisible reference. It is if it is an
2245 object whose size depends on the contents of the object itself or if
2246 the machine requires these objects be passed that way. */
2247 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
2248 || TREE_ADDRESSABLE (passed_type)
2249 || FUNCTION_ARG_PASS_BY_REFERENCE (all->args_so_far, passed_mode,
2250 passed_type, data->named_arg))
2251 {
2252 passed_type = nominal_type = build_pointer_type (passed_type);
2253 data->passed_pointer = true;
2254 passed_mode = nominal_mode = Pmode;
2255 }
2256 /* See if the frontend wants to pass this by invisible reference. */
2257 else if (passed_type != nominal_type
2258 && POINTER_TYPE_P (passed_type)
2259 && TREE_TYPE (passed_type) == nominal_type)
2260 {
2261 nominal_type = passed_type;
2262 data->passed_pointer = 1;
2263 passed_mode = nominal_mode = Pmode;
2264 }
2266 /* Find mode as it is passed by the ABI. */
2267 promoted_mode = passed_mode;
2268 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2269 {
2270 int unsignedp = TYPE_UNSIGNED (passed_type);
2271 promoted_mode = promote_mode (passed_type, promoted_mode,
2272 &unsignedp, 1);
2273 }
2275 egress:
2276 data->nominal_type = nominal_type;
2277 data->passed_type = passed_type;
2278 data->nominal_mode = nominal_mode;
2279 data->passed_mode = passed_mode;
2280 data->promoted_mode = promoted_mode;
2281 }
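/* To illustrate the three modes computed above: for a prototyped
   "short" parameter on a hypothetical target whose ABI promotes small
   integers, nominal_mode and passed_mode are both HImode (how the
   function body sees the value) while promoted_mode is SImode (how
   the ABI actually transfers it).  For a large struct passed by
   invisible reference, all three become Pmode and passed_pointer is
   set.  */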
2283 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2285 static void
2286 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2287 struct assign_parm_data_one *data, bool no_rtl)
2288 {
2289 int varargs_pretend_bytes = 0;
2291 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2292 data->promoted_mode,
2293 data->passed_type,
2294 &varargs_pretend_bytes, no_rtl);
2296 /* If the back-end has requested extra stack space, record how much is
2297 needed. Do not change pretend_args_size otherwise since it may be
2298 nonzero from an earlier partial argument. */
2299 if (varargs_pretend_bytes > 0)
2300 all->pretend_args_size = varargs_pretend_bytes;
2301 }
2303 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2304 the incoming location of the current parameter. */
2306 static void
2307 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2308 struct assign_parm_data_one *data)
2309 {
2310 HOST_WIDE_INT pretend_bytes = 0;
2311 rtx entry_parm;
2312 bool in_regs;
2314 if (data->promoted_mode == VOIDmode)
2315 {
2316 data->entry_parm = data->stack_parm = const0_rtx;
2317 return;
2318 }
2320 #ifdef FUNCTION_INCOMING_ARG
2321 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2322 data->passed_type, data->named_arg);
2323 #else
2324 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2325 data->passed_type, data->named_arg);
2326 #endif
2328 if (entry_parm == 0)
2329 data->promoted_mode = data->passed_mode;
2331 /* Determine parm's home in the stack, in case it arrives in the stack
2332 or we should pretend it did. Compute the stack position and rtx where
2333 the argument arrives and its size.
2335 There is one complexity here: If this was a parameter that would
2336 have been passed in registers, but wasn't only because it is
2337 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2338 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2339 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2340 as it was the previous time. */
2341 in_regs = entry_parm != 0;
2342 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2343 in_regs = true;
2344 #endif
2345 if (!in_regs && !data->named_arg)
2346 {
2347 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2348 {
2349 rtx tem;
2350 #ifdef FUNCTION_INCOMING_ARG
2351 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2352 data->passed_type, true);
2353 #else
2354 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2355 data->passed_type, true);
2356 #endif
2357 in_regs = tem != NULL;
2358 }
2359 }
2361 /* If this parameter was passed both in registers and in the stack, use
2362 the copy on the stack. */
2363 if (MUST_PASS_IN_STACK (data->promoted_mode, data->passed_type))
2364 entry_parm = 0;
2366 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2367 if (entry_parm)
2368 {
2369 int partial;
2371 partial = FUNCTION_ARG_PARTIAL_NREGS (all->args_so_far,
2372 data->promoted_mode,
2373 data->passed_type,
2374 data->named_arg);
2375 data->partial = partial;
2377 /* The caller might already have allocated stack space for the
2378 register parameters. */
2379 if (partial != 0 && all->reg_parm_stack_space == 0)
2380 {
2381 /* Part of this argument is passed in registers and part
2382 is passed on the stack. Ask the prologue code to extend
2383 the stack part so that we can recreate the full value.
2385 PRETEND_BYTES is the size of the registers we need to store.
2386 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2387 stack space that the prologue should allocate.
2389 Internally, gcc assumes that the argument pointer is aligned
2390 to STACK_BOUNDARY bits. This is used both for alignment
2391 optimizations (see init_emit) and to locate arguments that are
2392 aligned to more than PARM_BOUNDARY bits. We must preserve this
2393 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2394 a stack boundary. */
2396 /* We assume at most one partial arg, and it must be the first
2397 argument on the stack. */
2398 if (all->extra_pretend_bytes || all->pretend_args_size)
2399 abort ();
2401 pretend_bytes = partial * UNITS_PER_WORD;
2402 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2404 /* We want to align relative to the actual stack pointer, so
2405 don't include this in the stack size until later. */
2406 all->extra_pretend_bytes = all->pretend_args_size;
2407 }
2408 }
2409 #endif
2411 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2412 entry_parm ? data->partial : 0, current_function_decl,
2413 &all->stack_args_size, &data->locate);
2415 /* Adjust offsets to include the pretend args. */
2416 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2417 data->locate.slot_offset.constant += pretend_bytes;
2418 data->locate.offset.constant += pretend_bytes;
2420 data->entry_parm = entry_parm;
2421 }
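/* A concrete case of the partial-argument handling above: on a
   hypothetical target with four word-sized argument registers, a
   24-byte struct that starts in the third register has
   FUNCTION_ARG_PARTIAL_NREGS of 2, so PRETEND_BYTES is 8; the
   prologue is asked to store those two registers just below the
   incoming arguments, and locate_and_pad_parm then sees the whole
   24-byte object as if the caller had placed it on the stack.  */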
2423 /* A subroutine of assign_parms. If there is actually space on the stack
2424 for this parm, count it in stack_args_size and return true. */
2426 static bool
2427 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2428 struct assign_parm_data_one *data)
2429 {
2430 /* Trivially true if we've no incoming register. */
2431 if (data->entry_parm == NULL)
2432 ;
2433 /* Also true if we're partially in registers and partially not,
2434 since we've arranged to drop the entire argument on the stack. */
2435 else if (data->partial != 0)
2436 ;
2437 /* Also true if the target says that it's passed in both registers
2438 and on the stack. */
2439 else if (GET_CODE (data->entry_parm) == PARALLEL
2440 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2441 ;
2442 /* Also true if the target says that there's stack allocated for
2443 all register parameters. */
2444 else if (all->reg_parm_stack_space > 0)
2445 ;
2446 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2447 else
2448 return false;
2450 all->stack_args_size.constant += data->locate.size.constant;
2451 if (data->locate.size.var)
2452 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2454 return true;
2455 }
2457 /* A subroutine of assign_parms. Given that this parameter is allocated
2458 stack space by the ABI, find it. */
2460 static void
2461 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2462 {
2463 rtx offset_rtx, stack_parm;
2464 unsigned int align, boundary;
2466 /* If we're passing this arg using a reg, make its stack home the
2467 aligned stack slot. */
2468 if (data->entry_parm)
2469 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2470 else
2471 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2473 stack_parm = current_function_internal_arg_pointer;
2474 if (offset_rtx != const0_rtx)
2475 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2476 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2478 set_mem_attributes (stack_parm, parm, 1);
2480 boundary = FUNCTION_ARG_BOUNDARY (data->promoted_mode, data->passed_type);
2481 align = 0;
2483 /* If we're padding upward, we know that the alignment of the slot
2484 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2485 intentionally forcing upward padding. Otherwise we have to come
2486 up with a guess at the alignment based on OFFSET_RTX. */
2487 if (data->locate.where_pad == upward || data->entry_parm)
2488 align = boundary;
2489 else if (GET_CODE (offset_rtx) == CONST_INT)
2490 {
2491 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2492 align = align & -align;
2493 }
2494 if (align > 0)
2495 set_mem_align (stack_parm, align);
2497 if (data->entry_parm)
2498 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2500 data->stack_parm = stack_parm;
2501 }
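/* The alignment guess above works like this: for a slot 20 bytes from
   the STACK_BOUNDARY-aligned argument pointer and a 64-bit argument
   boundary, (20 * BITS_PER_UNIT) | 64 is 224, and 224 & -224 is 32,
   so the MEM is marked as 32-bit aligned -- the largest power of two
   guaranteed to divide the slot's address.  */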
2503 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2504 always valid and contiguous. */
2506 static void
2507 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2508 {
2509 rtx entry_parm = data->entry_parm;
2510 rtx stack_parm = data->stack_parm;
2512 /* If this parm was passed part in regs and part in memory, pretend it
2513 arrived entirely in memory by pushing the register-part onto the stack.
2514 In the special case of a DImode or DFmode that is split, we could put
2515 it together in a pseudoreg directly, but for now that's not worth
2516 bothering with. */
2517 if (data->partial != 0)
2518 {
2519 /* Handle calls that pass values in multiple non-contiguous
2520 locations. The Irix 6 ABI has examples of this. */
2521 if (GET_CODE (entry_parm) == PARALLEL)
2522 emit_group_store (validize_mem (stack_parm), entry_parm,
2523 data->passed_type,
2524 int_size_in_bytes (data->passed_type));
2525 else
2526 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2527 data->partial);
2529 entry_parm = stack_parm;
2530 }
2532 /* If we didn't decide this parm came in a register, by default it came
2533 on the stack. */
2534 else if (entry_parm == NULL)
2535 entry_parm = stack_parm;
2537 /* When an argument is passed in multiple locations, we can't make use
2538 of this information, but we can save some copying if the whole argument
2539 is passed in a single register. */
2540 else if (GET_CODE (entry_parm) == PARALLEL
2541 && data->nominal_mode != BLKmode
2542 && data->passed_mode != BLKmode)
2543 {
2544 size_t i, len = XVECLEN (entry_parm, 0);
2546 for (i = 0; i < len; i++)
2547 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2548 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2549 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2550 == data->passed_mode)
2551 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2552 {
2553 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2554 break;
2555 }
2556 }
2558 data->entry_parm = entry_parm;
2559 }
2561 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2562 always valid and properly aligned. */
2565 static void
2566 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2567 {
2568 rtx stack_parm = data->stack_parm;
2570 /* If we can't trust the parm stack slot to be aligned enough for its
2571 ultimate type, don't use that slot after entry. We'll make another
2572 stack slot, if we need one. */
2573 if (STRICT_ALIGNMENT && stack_parm
2574 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2575 stack_parm = NULL;
2577 /* If parm was passed in memory, and we need to convert it on entry,
2578 don't store it back in that same slot. */
2579 else if (data->entry_parm == stack_parm
2580 && data->nominal_mode != BLKmode
2581 && data->nominal_mode != data->passed_mode)
2582 stack_parm = NULL;
2584 data->stack_parm = stack_parm;
2585 }
2587 /* A subroutine of assign_parms. Return true if the current parameter
2588 should be stored as a BLKmode in the current frame. */
2590 static bool
2591 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2592 {
2593 if (data->nominal_mode == BLKmode)
2594 return true;
2595 if (GET_CODE (data->entry_parm) == PARALLEL)
2596 return true;
2598 #ifdef BLOCK_REG_PADDING
2599 if (data->locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
2600 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD)
2601 return true;
2602 #endif
2604 return false;
2605 }
2607 /* A subroutine of assign_parms. Arrange for the parameter to be
2608 present and valid in DATA->STACK_RTL. */
2610 static void
2611 assign_parm_setup_block (tree parm, struct assign_parm_data_one *data)
2612 {
2613 rtx entry_parm = data->entry_parm;
2614 rtx stack_parm = data->stack_parm;
2616 /* If we've a non-block object that's nevertheless passed in parts,
2617 reconstitute it in register operations rather than on the stack. */
2618 if (GET_CODE (entry_parm) == PARALLEL
2619 && data->nominal_mode != BLKmode
2620 && XVECLEN (entry_parm, 0) > 1
2621 && optimize)
2622 {
2623 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2625 emit_group_store (parmreg, entry_parm, data->nominal_type,
2626 int_size_in_bytes (data->nominal_type));
2627 SET_DECL_RTL (parm, parmreg);
2628 return;
2629 }
2631 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2632 calls that pass values in multiple non-contiguous locations. */
2633 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2634 {
2635 HOST_WIDE_INT size = int_size_in_bytes (data->passed_type);
2636 HOST_WIDE_INT size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2637 rtx mem;
2639 /* Note that we will be storing an integral number of words.
2640 So we have to be careful to ensure that we allocate an
2641 integral number of words. We do this below in the
2642 assign_stack_local if space was not allocated in the argument
2643 list. If it was, this will not work if PARM_BOUNDARY is not
2644 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2645 if it becomes a problem. The exception is when BLKmode arrives
2646 with arguments not conforming to word_mode. */
2648 if (stack_parm == 0)
2649 {
2650 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
2651 data->stack_parm = stack_parm;
2652 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2653 set_mem_attributes (stack_parm, parm, 1);
2654 }
2655 else if (GET_CODE (entry_parm) == PARALLEL)
2656 ;
2657 else if (size != 0 && PARM_BOUNDARY % BITS_PER_WORD != 0)
2658 abort ();
2660 mem = validize_mem (stack_parm);
2662 /* Handle values in multiple non-contiguous locations. */
2663 if (GET_CODE (entry_parm) == PARALLEL)
2664 emit_group_store (mem, entry_parm, data->passed_type, size);
2666 else if (size == 0)
2667 ;
2669 /* If SIZE is that of a mode no bigger than a word, just use
2670 that mode's store operation. */
2671 else if (size <= UNITS_PER_WORD)
2673 enum machine_mode mode
2674 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2676 if (mode != BLKmode
2677 #ifdef BLOCK_REG_PADDING
2678 && (size == UNITS_PER_WORD
2679 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2680 != (BYTES_BIG_ENDIAN ? upward : downward)))
2681 #endif
2682 )
2683 {
2684 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2685 emit_move_insn (change_address (mem, mode, 0), reg);
2686 }
2688 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2689 machine must be aligned to the left before storing
2690 to memory. Note that the previous test doesn't
2691 handle all cases (e.g. SIZE == 3). */
2692 else if (size != UNITS_PER_WORD
2693 #ifdef BLOCK_REG_PADDING
2694 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2695 == downward)
2696 #else
2697 && BYTES_BIG_ENDIAN
2698 #endif
2699 )
2700 {
2701 rtx tem, x;
2702 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2703 rtx reg = gen_rtx_REG (word_mode, REGNO (data->entry_parm));
2705 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2706 build_int_2 (by, 0), NULL_RTX, 1);
2707 tem = change_address (mem, word_mode, 0);
2708 emit_move_insn (tem, x);
2709 }
2710 else
2711 move_block_from_reg (REGNO (data->entry_parm), mem,
2712 size_stored / UNITS_PER_WORD);
2714 else
2715 move_block_from_reg (REGNO (data->entry_parm), mem,
2716 size_stored / UNITS_PER_WORD);
2717 }
2719 SET_DECL_RTL (parm, stack_parm);
2720 }
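/* The left-justification case above matters for, say,

     struct s3 { char c[3]; };

   passed by value on a big-endian target that pads small blocks
   downward: the three bytes arrive in the least significant end of a
   word register, so they are shifted left by
   (UNITS_PER_WORD - 3) * BITS_PER_UNIT before the word_mode store,
   which on such a target writes them to the low addresses of the
   slot.  */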
2722 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2723 parameter. Get it there. Perform all ABI specified conversions. */
2725 static void
2726 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2727 struct assign_parm_data_one *data)
2728 {
2729 rtx parmreg;
2730 enum machine_mode promoted_nominal_mode;
2731 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2732 bool did_conversion = false;
2734 /* Store the parm in a pseudoregister during the function, but we may
2735 need to do it in a wider mode. */
2737 promoted_nominal_mode
2738 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2740 parmreg = gen_reg_rtx (promoted_nominal_mode);
2742 if (!DECL_ARTIFICIAL (parm))
2743 mark_user_reg (parmreg);
2745 /* If this was an item that we received a pointer to,
2746 set DECL_RTL appropriately. */
2747 if (data->passed_pointer)
2748 {
2749 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2750 set_mem_attributes (x, parm, 1);
2751 SET_DECL_RTL (parm, x);
2752 }
2753 else
2754 {
2755 SET_DECL_RTL (parm, parmreg);
2756 maybe_set_unchanging (DECL_RTL (parm), parm);
2757 }
2759 /* Copy the value into the register. */
2760 if (data->nominal_mode != data->passed_mode
2761 || promoted_nominal_mode != data->promoted_mode)
2762 {
2763 int save_tree_used;
2765 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2766 mode, by the caller. We now have to convert it to
2767 NOMINAL_MODE, if different. However, PARMREG may be in
2768 a different mode than NOMINAL_MODE if it is being stored
2769 promoted.
2771 If ENTRY_PARM is a hard register, it might be in a register
2772 not valid for operating in its mode (e.g., an odd-numbered
2773 register for a DFmode). In that case, moves are the only
2774 thing valid, so we can't do a convert from there. This
2775 occurs when the calling sequence allows such misaligned
2776 usage.
2778 In addition, the conversion may involve a call, which could
2779 clobber parameters which haven't been copied to pseudo
2780 registers yet. Therefore, we must first copy the parm to
2781 a pseudo reg here, and save the conversion until after all
2782 parameters have been moved. */
2784 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2786 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2788 push_to_sequence (all->conversion_insns);
2789 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2791 if (GET_CODE (tempreg) == SUBREG
2792 && GET_MODE (tempreg) == data->nominal_mode
2793 && REG_P (SUBREG_REG (tempreg))
2794 && data->nominal_mode == data->passed_mode
2795 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2796 && GET_MODE_SIZE (GET_MODE (tempreg))
2797 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2798 {
2799 /* The argument is already sign/zero extended, so note it
2800 into the subreg. */
2801 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2802 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2803 }
2805 /* TREE_USED gets set erroneously during expand_assignment. */
2806 save_tree_used = TREE_USED (parm);
2807 expand_assignment (parm, make_tree (data->nominal_type, tempreg), 0);
2808 TREE_USED (parm) = save_tree_used;
2809 all->conversion_insns = get_insns ();
2810 end_sequence ();
2812 did_conversion = true;
2813 }
2814 else
2815 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2817 /* If we were passed a pointer but the actual value can safely live
2818 in a register, put it in one. */
2819 if (data->passed_pointer
2820 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2821 /* If by-reference argument was promoted, demote it. */
2822 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2823 || use_register_for_decl (parm)))
2824 {
2825 /* We can't use nominal_mode, because it will have been set to
2826 Pmode above. We must use the actual mode of the parm. */
2827 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2828 mark_user_reg (parmreg);
2830 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2831 {
2832 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2833 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2835 push_to_sequence (all->conversion_insns);
2836 emit_move_insn (tempreg, DECL_RTL (parm));
2837 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2838 emit_move_insn (parmreg, tempreg);
2839 all->conversion_insns = get_insns();
2840 end_sequence ();
2842 did_conversion = true;
2843 }
2844 else
2845 emit_move_insn (parmreg, DECL_RTL (parm));
2847 SET_DECL_RTL (parm, parmreg);
2849 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2850 now the parm. */
2851 data->stack_parm = NULL;
2852 }
2854 #ifdef FUNCTION_ARG_CALLEE_COPIES
2855 /* If we are passed an arg by reference and it is our responsibility
2856 to make a copy, do it now.
2857 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
2858 original argument, so we must recreate them in the call to
2859 FUNCTION_ARG_CALLEE_COPIES. */
2860 /* ??? Later add code to handle the case where the argument isn't
2861 modified, and skip the copy. */
2863 else if (data->passed_pointer)
2864 {
2865 tree type = TREE_TYPE (data->passed_type);
2867 if (FUNCTION_ARG_CALLEE_COPIES (all->args_so_far, TYPE_MODE (type),
2868 type, data->named_arg)
2869 && !TREE_ADDRESSABLE (type))
2870 {
2871 rtx copy;
2873 /* This sequence may involve a library call perhaps clobbering
2874 registers that haven't been copied to pseudos yet. */
2876 push_to_sequence (all->conversion_insns);
2878 if (!COMPLETE_TYPE_P (type)
2879 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2880 {
2881 /* This is a variable sized object. */
2882 copy = allocate_dynamic_stack_space (expr_size (parm), NULL_RTX,
2883 TYPE_ALIGN (type));
2884 copy = gen_rtx_MEM (BLKmode, copy);
2885 }
2886 else
2887 copy = assign_stack_temp (TYPE_MODE (type),
2888 int_size_in_bytes (type), 1);
2889 set_mem_attributes (copy, parm, 1);
2891 store_expr (parm, copy, 0);
2892 emit_move_insn (parmreg, XEXP (copy, 0));
2893 all->conversion_insns = get_insns ();
2894 end_sequence ();
2896 did_conversion = true;
2897 }
2898 }
2899 #endif /* FUNCTION_ARG_CALLEE_COPIES */
2901 /* Mark the register as eliminable if we did no conversion and it was
2902 copied from memory at a fixed offset, and the arg pointer was not
2903 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2904 offset formed an invalid address, such memory-equivalences as we
2905 make here would screw up life analysis for it. */
2906 if (data->nominal_mode == data->passed_mode
2907 && !did_conversion
2908 && data->stack_parm != 0
2909 && MEM_P (data->stack_parm)
2910 && data->locate.offset.var == 0
2911 && reg_mentioned_p (virtual_incoming_args_rtx,
2912 XEXP (data->stack_parm, 0)))
2913 {
2914 rtx linsn = get_last_insn ();
2915 rtx sinsn, set;
2917 /* Mark complex types separately. */
2918 if (GET_CODE (parmreg) == CONCAT)
2919 {
2920 enum machine_mode submode
2921 = GET_MODE_INNER (GET_MODE (parmreg));
2922 int regnor = REGNO (gen_realpart (submode, parmreg));
2923 int regnoi = REGNO (gen_imagpart (submode, parmreg));
2924 rtx stackr = gen_realpart (submode, data->stack_parm);
2925 rtx stacki = gen_imagpart (submode, data->stack_parm);
2927 /* Scan backwards for the set of the real and
2928 imaginary parts. */
2929 for (sinsn = linsn; sinsn != 0;
2930 sinsn = prev_nonnote_insn (sinsn))
2931 {
2932 set = single_set (sinsn);
2933 if (set == 0)
2934 continue;
2936 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2937 REG_NOTES (sinsn)
2938 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2939 REG_NOTES (sinsn));
2940 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2941 REG_NOTES (sinsn)
2942 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2943 REG_NOTES (sinsn));
2944 }
2945 }
2946 else if ((set = single_set (linsn)) != 0
2947 && SET_DEST (set) == parmreg)
2948 REG_NOTES (linsn)
2949 = gen_rtx_EXPR_LIST (REG_EQUIV,
2950 data->stack_parm, REG_NOTES (linsn));
2951 }
2953 /* For pointer data type, suggest pointer register. */
2954 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2955 mark_reg_pointer (parmreg,
2956 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2957 }
2959 /* A subroutine of assign_parms. Allocate stack space to hold the current
2960 parameter. Get it there. Perform all ABI specified conversions. */
2962 static void
2963 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2964 struct assign_parm_data_one *data)
2965 {
2966 /* Value must be stored in the stack slot STACK_PARM during function
2967 execution. */
2969 if (data->promoted_mode != data->nominal_mode)
2970 {
2971 /* Conversion is required. */
2972 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2974 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2976 push_to_sequence (all->conversion_insns);
2977 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2978 TYPE_UNSIGNED (TREE_TYPE (parm)));
2980 if (data->stack_parm)
2981 /* ??? This may need a big-endian conversion on sparc64. */
2982 data->stack_parm
2983 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2985 all->conversion_insns = get_insns ();
2986 end_sequence ();
2987 }
2989 if (data->entry_parm != data->stack_parm)
2990 {
2991 if (data->stack_parm == 0)
2992 {
2993 data->stack_parm
2994 = assign_stack_local (GET_MODE (data->entry_parm),
2995 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2996 0);
2997 set_mem_attributes (data->stack_parm, parm, 1);
2998 }
3000 if (data->promoted_mode != data->nominal_mode)
3001 {
3002 push_to_sequence (all->conversion_insns);
3003 emit_move_insn (validize_mem (data->stack_parm),
3004 validize_mem (data->entry_parm));
3005 all->conversion_insns = get_insns ();
3006 end_sequence ();
3007 }
3008 else
3009 emit_move_insn (validize_mem (data->stack_parm),
3010 validize_mem (data->entry_parm));
3011 }
3013 SET_DECL_RTL (parm, data->stack_parm);
3014 }
3016 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3017 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3019 static void
3020 assign_parms_unsplit_complex (tree orig_fnargs, tree fnargs)
3021 {
3022 tree parm;
3024 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
3025 {
3026 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3027 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3028 {
3029 rtx tmp, real, imag;
3030 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3032 real = DECL_RTL (fnargs);
3033 imag = DECL_RTL (TREE_CHAIN (fnargs));
3034 if (inner != GET_MODE (real))
3035 {
3036 real = gen_lowpart_SUBREG (inner, real);
3037 imag = gen_lowpart_SUBREG (inner, imag);
3038 }
3039 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3040 SET_DECL_RTL (parm, tmp);
3042 real = DECL_INCOMING_RTL (fnargs);
3043 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
3044 if (inner != GET_MODE (real))
3045 {
3046 real = gen_lowpart_SUBREG (inner, real);
3047 imag = gen_lowpart_SUBREG (inner, imag);
3048 }
3049 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3050 set_decl_incoming_rtl (parm, tmp);
3051 fnargs = TREE_CHAIN (fnargs);
3052 }
3053 else
3054 {
3055 SET_DECL_RTL (parm, DECL_RTL (fnargs));
3056 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
3058 /* Set MEM_EXPR to the original decl, i.e. to PARM,
3059 instead of the copy of decl, i.e. FNARGS. */
3060 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
3061 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
3062 }
3064 fnargs = TREE_CHAIN (fnargs);
3065 }
3066 }
3068 /* Assign RTL expressions to the function's parameters. This may involve
3069 copying them into registers and using those registers as the DECL_RTL. */
3071 void
3072 assign_parms (tree fndecl)
3073 {
3074 struct assign_parm_data_all all;
3075 tree fnargs, parm;
3076 rtx internal_arg_pointer;
3077 int varargs_setup = 0;
3079 /* If the reg that the virtual arg pointer will be translated into is
3080 not a fixed reg or is the stack pointer, make a copy of the virtual
3081 arg pointer, and address parms via the copy. The frame pointer is
3082 considered fixed even though it is not marked as such.
3084 The second time through, simply use ap to avoid generating rtx. */
3086 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3087 || ! (fixed_regs[ARG_POINTER_REGNUM]
3088 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3089 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3090 else
3091 internal_arg_pointer = virtual_incoming_args_rtx;
3092 current_function_internal_arg_pointer = internal_arg_pointer;
3094 assign_parms_initialize_all (&all);
3095 fnargs = assign_parms_augmented_arg_list (&all);
3097 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3098 {
3099 struct assign_parm_data_one data;
3101 /* Extract the type of PARM; adjust it according to ABI. */
3102 assign_parm_find_data_types (&all, parm, &data);
3104 /* Early out for errors and void parameters. */
3105 if (data.passed_mode == VOIDmode)
3106 {
3107 SET_DECL_RTL (parm, const0_rtx);
3108 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3109 continue;
3110 }
3112 /* Handle stdargs. LAST_NAMED is a slight misnomer; it's also true
3113 for the unnamed dummy argument following the last named argument.
3114 See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
3115 we only want to do this when we get to the actual last named
3116 argument, which will be the first time LAST_NAMED gets set. */
3117 if (data.last_named && !varargs_setup)
3118 {
3119 varargs_setup = true;
3120 assign_parms_setup_varargs (&all, &data, false);
3121 }
3123 /* Find out where the parameter arrives in this function. */
3124 assign_parm_find_entry_rtl (&all, &data);
3126 /* Find out where stack space for this parameter might be. */
3127 if (assign_parm_is_stack_parm (&all, &data))
3128 {
3129 assign_parm_find_stack_rtl (parm, &data);
3130 assign_parm_adjust_entry_rtl (&data);
3131 }
3133 /* Record permanently how this parm was passed. */
3134 set_decl_incoming_rtl (parm, data.entry_parm);
3136 /* Update info on where next arg arrives in registers. */
3137 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3138 data.passed_type, data.named_arg);
3140 assign_parm_adjust_stack_rtl (&data);
3142 if (assign_parm_setup_block_p (&data))
3143 assign_parm_setup_block (parm, &data);
3144 else if (data.passed_pointer || use_register_for_decl (parm))
3145 assign_parm_setup_reg (&all, parm, &data);
3146 else
3147 assign_parm_setup_stack (&all, parm, &data);
3148 }
3150 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3151 assign_parms_unsplit_complex (all.orig_fnargs, fnargs);
3153 /* Output all parameter conversion instructions (possibly including calls)
3154 now that all parameters have been copied out of hard registers. */
3155 emit_insn (all.conversion_insns);
3157 /* If we are receiving a struct value address as the first argument, set up
3158 the RTL for the function result. As this might require code to convert
3159 the transmitted address to Pmode, we do this here to ensure that possible
3160 preliminary conversions of the address have been emitted already. */
3161 if (all.function_result_decl)
3162 {
3163 tree result = DECL_RESULT (current_function_decl);
3164 rtx addr = DECL_RTL (all.function_result_decl);
3165 rtx x;
3167 addr = convert_memory_address (Pmode, addr);
3168 x = gen_rtx_MEM (DECL_MODE (result), addr);
3169 set_mem_attributes (x, result, 1);
3170 SET_DECL_RTL (result, x);
3171 }
3173 /* We have aligned all the args, so add space for the pretend args. */
3174 current_function_pretend_args_size = all.pretend_args_size;
3175 all.stack_args_size.constant += all.extra_pretend_bytes;
3176 current_function_args_size = all.stack_args_size.constant;
3178 /* Adjust function incoming argument size for alignment and
3179 minimum length. */
3181 #ifdef REG_PARM_STACK_SPACE
3182 current_function_args_size = MAX (current_function_args_size,
3183 REG_PARM_STACK_SPACE (fndecl));
3184 #endif
3186 current_function_args_size
3187 = ((current_function_args_size + STACK_BYTES - 1)
3188 / STACK_BYTES) * STACK_BYTES;
3190 #ifdef ARGS_GROW_DOWNWARD
3191 current_function_arg_offset_rtx
3192 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3193 : expand_expr (size_diffop (all.stack_args_size.var,
3194 size_int (-all.stack_args_size.constant)),
3195 NULL_RTX, VOIDmode, 0));
3196 #else
3197 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3198 #endif
3200 /* See how many bytes, if any, of its args a function should try to pop
3201 on return. */
3203 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3204 current_function_args_size);
3206 /* For stdarg.h function, save info about
3207 regs and stack space used by the named args. */
3209 current_function_args_info = all.args_so_far;
3211 /* Set the rtx used for the function return value. Put this in its
3212 own variable so any optimizers that need this information don't have
3213 to include tree.h. Do this here so it gets done when an inlined
3214 function gets output. */
3216 current_function_return_rtx
3217 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3218 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3220 /* If scalar return value was computed in a pseudo-reg, or was a named
3221 return value that got dumped to the stack, copy that to the hard
3222 return register. */
3223 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3224 {
3225 tree decl_result = DECL_RESULT (fndecl);
3226 rtx decl_rtl = DECL_RTL (decl_result);
3228 if (REG_P (decl_rtl)
3229 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3230 : DECL_REGISTER (decl_result))
3231 {
3232 rtx real_decl_rtl;
3234 #ifdef FUNCTION_OUTGOING_VALUE
3235 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3236 fndecl);
3237 #else
3238 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3239 fndecl);
3240 #endif
3241 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3242 /* The delay slot scheduler assumes that current_function_return_rtx
3243 holds the hard register containing the return value, not a
3244 temporary pseudo. */
3245 current_function_return_rtx = real_decl_rtl;
3246 }
3247 }
3248 }
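/* Taken together, for something like

     int f (int a, double d) { ... }

   the loop above typically leaves DECL_INCOMING_RTL (a) pointing at
   the first argument register (or at an arg-pointer MEM if it arrived
   on the stack), gives DECL_RTL (a) a fresh pseudo when optimizing,
   and queues the register-to-pseudo copies and any promotions in
   all.conversion_insns until every parameter is safely out of the
   hard registers.  The details are entirely target-dependent; this is
   only the common shape.  */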
3250 /* Indicate whether REGNO is an incoming argument to the current function
3251 that was promoted to a wider mode. If so, return the RTX for the
3252 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3253 that REGNO is promoted from and whether the promotion was signed or
3254 unsigned. */
3256 rtx
3257 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3258 {
3259 tree arg;
3261 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3262 arg = TREE_CHAIN (arg))
3263 if (REG_P (DECL_INCOMING_RTL (arg))
3264 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3265 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3266 {
3267 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3268 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3270 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3271 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3272 && mode != DECL_MODE (arg))
3273 {
3274 *pmode = DECL_MODE (arg);
3275 *punsignedp = unsignedp;
3276 return DECL_INCOMING_RTL (arg);
3277 }
3278 }
3280 return 0;
3281 }
3284 /* Compute the size and offset from the start of the stacked arguments for a
3285 parm passed in mode PASSED_MODE and with type TYPE.
3287 INITIAL_OFFSET_PTR points to the current offset into the stacked
3288 arguments.
3290 The starting offset and size for this parm are returned in
3291 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3292 nonzero, the offset is that of stack slot, which is returned in
3293 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3294 padding required from the initial offset ptr to the stack slot.
3296 IN_REGS is nonzero if the argument will be passed in registers. It will
3297 never be set if REG_PARM_STACK_SPACE is not defined.
3299 FNDECL is the function in which the argument was defined.
3301 There are two types of rounding that are done. The first, controlled by
3302 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3303 list to be aligned to the specific boundary (in bits). This rounding
3304 affects the initial and starting offsets, but not the argument size.
3306 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3307 optionally rounds the size of the parm to PARM_BOUNDARY. The
3308 initial offset is not affected by this rounding, while the size always
3309 is and the starting offset may be. */
3311 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3312 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3313 callers pass in the total size of args so far as
3314 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3316 void
3317 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3318 int partial, tree fndecl ATTRIBUTE_UNUSED,
3319 struct args_size *initial_offset_ptr,
3320 struct locate_and_pad_arg_data *locate)
3321 {
3322 tree sizetree;
3323 enum direction where_pad;
3324 int boundary;
3325 int reg_parm_stack_space = 0;
3326 int part_size_in_regs;
3328 #ifdef REG_PARM_STACK_SPACE
3329 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3331 /* If we have found a stack parm before we reach the end of the
3332 area reserved for registers, skip that area. */
3333 if (! in_regs)
3334 {
3335 if (reg_parm_stack_space > 0)
3336 {
3337 if (initial_offset_ptr->var)
3338 {
3339 initial_offset_ptr->var
3340 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3341 ssize_int (reg_parm_stack_space));
3342 initial_offset_ptr->constant = 0;
3343 }
3344 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3345 initial_offset_ptr->constant = reg_parm_stack_space;
3346 }
3347 }
3348 #endif /* REG_PARM_STACK_SPACE */
3350 part_size_in_regs = 0;
3351 if (reg_parm_stack_space == 0)
3352 part_size_in_regs = ((partial * UNITS_PER_WORD)
3353 / (PARM_BOUNDARY / BITS_PER_UNIT)
3354 * (PARM_BOUNDARY / BITS_PER_UNIT));
3356 sizetree
3357 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3358 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3359 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3360 locate->where_pad = where_pad;
3362 #ifdef ARGS_GROW_DOWNWARD
3363 locate->slot_offset.constant = -initial_offset_ptr->constant;
3364 if (initial_offset_ptr->var)
3365 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3366 initial_offset_ptr->var);
3368 {
3369 tree s2 = sizetree;
3370 if (where_pad != none
3371 && (!host_integerp (sizetree, 1)
3372 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3373 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3374 SUB_PARM_SIZE (locate->slot_offset, s2);
3375 }
3377 locate->slot_offset.constant += part_size_in_regs;
3379 if (!in_regs
3380 #ifdef REG_PARM_STACK_SPACE
3381 || REG_PARM_STACK_SPACE (fndecl) > 0
3382 #endif
3383 )
3384 pad_to_arg_alignment (&locate->slot_offset, boundary,
3385 &locate->alignment_pad);
3387 locate->size.constant = (-initial_offset_ptr->constant
3388 - locate->slot_offset.constant);
3389 if (initial_offset_ptr->var)
3390 locate->size.var = size_binop (MINUS_EXPR,
3391 size_binop (MINUS_EXPR,
3392 ssize_int (0),
3393 initial_offset_ptr->var),
3394 locate->slot_offset.var);
3396 /* Pad_below needs the pre-rounded size to know how much to pad
3397 below. */
3398 locate->offset = locate->slot_offset;
3399 if (where_pad == downward)
3400 pad_below (&locate->offset, passed_mode, sizetree);
3402 #else /* !ARGS_GROW_DOWNWARD */
3403 if (!in_regs
3404 #ifdef REG_PARM_STACK_SPACE
3405 || REG_PARM_STACK_SPACE (fndecl) > 0
3406 #endif
3407 )
3408 pad_to_arg_alignment (initial_offset_ptr, boundary,
3409 &locate->alignment_pad);
3410 locate->slot_offset = *initial_offset_ptr;
3412 #ifdef PUSH_ROUNDING
3413 if (passed_mode != BLKmode)
3414 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3415 #endif
3417 /* Pad_below needs the pre-rounded size to know how much to pad below
3418 so this must be done before rounding up. */
3419 locate->offset = locate->slot_offset;
3420 if (where_pad == downward)
3421 pad_below (&locate->offset, passed_mode, sizetree);
3423 if (where_pad != none
3424 && (!host_integerp (sizetree, 1)
3425 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3426 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3428 ADD_PARM_SIZE (locate->size, sizetree);
3430 locate->size.constant -= part_size_in_regs;
3431 #endif /* ARGS_GROW_DOWNWARD */
3432 }
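/* A worked example for the upward-growing case: with PARM_BOUNDARY of
   32 bits, a 64-bit FUNCTION_ARG_BOUNDARY and an incoming offset of 4
   bytes, pad_to_arg_alignment rounds the offset up to 8, which
   becomes both slot_offset and offset (no downward padding assumed).
   A 6-byte argument is then rounded up to 8 bytes, so size.constant
   is 8 and the caller's running offset advances to 16.  The numbers
   are illustrative; every target chooses its own boundaries.  */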
3434 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3435 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3437 static void
3438 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3439 struct args_size *alignment_pad)
3440 {
3441 tree save_var = NULL_TREE;
3442 HOST_WIDE_INT save_constant = 0;
3443 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3444 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3446 #ifdef SPARC_STACK_BOUNDARY_HACK
3447 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3448 higher than the real alignment of %sp. However, when it does this,
3449 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3450 This is a temporary hack while the sparc port is fixed. */
3451 if (SPARC_STACK_BOUNDARY_HACK)
3452 sp_offset = 0;
3453 #endif
3455 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3456 {
3457 save_var = offset_ptr->var;
3458 save_constant = offset_ptr->constant;
3459 }
3461 alignment_pad->var = NULL_TREE;
3462 alignment_pad->constant = 0;
3464 if (boundary > BITS_PER_UNIT)
3465 {
3466 if (offset_ptr->var)
3467 {
3468 tree sp_offset_tree = ssize_int (sp_offset);
3469 tree offset = size_binop (PLUS_EXPR,
3470 ARGS_SIZE_TREE (*offset_ptr),
3471 sp_offset_tree);
3472 #ifdef ARGS_GROW_DOWNWARD
3473 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3474 #else
3475 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3476 #endif
3478 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3479 /* ARGS_SIZE_TREE includes constant term. */
3480 offset_ptr->constant = 0;
3481 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3482 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3483 save_var);
3484 }
3485 else
3486 {
3487 offset_ptr->constant = -sp_offset +
3488 #ifdef ARGS_GROW_DOWNWARD
3489 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3490 #else
3491 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3492 #endif
3493 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3494 alignment_pad->constant = offset_ptr->constant - save_constant;
3495 }
3496 }
3497 }
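/* For instance, rounding a constant offset of 9 to a 64-bit (8-byte)
   boundary with a zero STACK_POINTER_OFFSET gives CEIL_ROUND (9, 8),
   which is 16, and an alignment_pad of 7; under ARGS_GROW_DOWNWARD
   the same offset floors to 8 instead.  Note that alignment_pad is
   only recorded when BOUNDARY exceeds both PARM_BOUNDARY and
   STACK_BOUNDARY.  */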
3499 static void
3500 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3501 {
3502 if (passed_mode != BLKmode)
3503 {
3504 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3505 offset_ptr->constant
3506 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3507 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3508 - GET_MODE_SIZE (passed_mode));
3509 }
3510 else
3511 {
3512 if (TREE_CODE (sizetree) != INTEGER_CST
3513 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3514 {
3515 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3516 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3517 /* Add it in. */
3518 ADD_PARM_SIZE (*offset_ptr, s2);
3519 SUB_PARM_SIZE (*offset_ptr, sizetree);
3520 }
3521 }
3522 }
3524 /* Walk the tree of blocks describing the binding levels within a function
3525 and warn about variables that might be killed by setjmp or vfork.
3526 This is done after calling flow_analysis and before global_alloc
3527 clobbers the pseudo-regs to hard regs. */
3529 void
3530 setjmp_vars_warning (tree block)
3531 {
3532 tree decl, sub;
3534 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3535 {
3536 if (TREE_CODE (decl) == VAR_DECL
3537 && DECL_RTL_SET_P (decl)
3538 && REG_P (DECL_RTL (decl))
3539 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3540 warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
3541 decl, decl);
3542 }
3544 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3545 setjmp_vars_warning (sub);
3546 }
3548 /* Do the appropriate part of setjmp_vars_warning
3549 but for arguments instead of local variables. */
3551 void
3552 setjmp_args_warning (void)
3553 {
3554 tree decl;
3555 for (decl = DECL_ARGUMENTS (current_function_decl);
3556 decl; decl = TREE_CHAIN (decl))
3557 if (DECL_RTL (decl) != 0
3558 && REG_P (DECL_RTL (decl))
3559 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3560 warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
3561 decl, decl);
3562 }
3565 /* Convert a stack slot address ADDR for variable VAR
3566 (from a containing function)
3567 into an address valid in this function (using a static chain). */
3569 rtx
3570 fix_lexical_addr (rtx addr, tree var)
3571 {
3572 rtx basereg;
3573 HOST_WIDE_INT displacement;
3574 tree context = decl_function_context (var);
3575 struct function *fp;
3576 rtx base = 0;
3578 /* If this is the present function, we need not do anything. */
3579 if (context == current_function_decl)
3580 return addr;
3582 fp = find_function_data (context);
3584 /* Decode given address as base reg plus displacement. */
3585 if (REG_P (addr))
3586 basereg = addr, displacement = 0;
3587 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3588 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3589 else
3590 abort ();
3592 if (base == 0)
3593 abort ();
3595 /* Use same offset, relative to appropriate static chain or argument
3596 pointer. */
3597 return plus_constant (base, displacement);
3598 }
3600 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3601 and create duplicate blocks. */
3602 /* ??? Need an option to either create block fragments or to create
3603 abstract origin duplicates of a source block. It really depends
3604 on what optimization has been performed. */
3606 void
3607 reorder_blocks (void)
3608 {
3609 tree block = DECL_INITIAL (current_function_decl);
3610 varray_type block_stack;
3612 if (block == NULL_TREE)
3613 return;
3615 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
3617 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3618 clear_block_marks (block);
3620 /* Prune the old trees away, so that they don't get in the way. */
3621 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3622 BLOCK_CHAIN (block) = NULL_TREE;
3624 /* Recreate the block tree from the note nesting. */
3625 reorder_blocks_1 (get_insns (), block, &block_stack);
3626 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3628 /* Remove deleted blocks from the block fragment chains. */
3629 reorder_fix_fragments (block);
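/* Editorial illustration (not in the original source): if scheduling has
   interleaved the notes so that block A appears as

     BLOCK_BEG A ... BLOCK_END A ... BLOCK_BEG A ... BLOCK_END A

   the second BLOCK_BEG finds TREE_ASM_WRITTEN (A) set, so reorder_blocks_1
   makes a copy A' with BLOCK_FRAGMENT_ORIGIN (A') == A, links A' into A's
   BLOCK_FRAGMENT_CHAIN, and rewrites the note to point at A'.  */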
3632 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3634 void
3635 clear_block_marks (tree block)
3637 while (block)
3639 TREE_ASM_WRITTEN (block) = 0;
3640 clear_block_marks (BLOCK_SUBBLOCKS (block));
3641 block = BLOCK_CHAIN (block);
3645 static void
3646 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
3648 rtx insn;
3650 for (insn = insns; insn; insn = NEXT_INSN (insn))
3652 if (GET_CODE (insn) == NOTE)
3654 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3656 tree block = NOTE_BLOCK (insn);
3658 /* If we have seen this block before, that means it now
3659 spans multiple address regions. Create a new fragment. */
3660 if (TREE_ASM_WRITTEN (block))
3662 tree new_block = copy_node (block);
3663 tree origin;
3665 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3666 ? BLOCK_FRAGMENT_ORIGIN (block)
3667 : block);
3668 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3669 BLOCK_FRAGMENT_CHAIN (new_block)
3670 = BLOCK_FRAGMENT_CHAIN (origin);
3671 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3673 NOTE_BLOCK (insn) = new_block;
3674 block = new_block;
3677 BLOCK_SUBBLOCKS (block) = 0;
3678 TREE_ASM_WRITTEN (block) = 1;
3679 /* When there's only one block for the entire function,
3680 current_block == block and we mustn't do this; it
3681 will cause infinite recursion. */
3682 if (block != current_block)
3684 BLOCK_SUPERCONTEXT (block) = current_block;
3685 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3686 BLOCK_SUBBLOCKS (current_block) = block;
3687 current_block = block;
3689 VARRAY_PUSH_TREE (*p_block_stack, block);
3691 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3693 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
3694 VARRAY_POP (*p_block_stack);
3695 BLOCK_SUBBLOCKS (current_block)
3696 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3697 current_block = BLOCK_SUPERCONTEXT (current_block);
3703 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3704 appears in the block tree, select one of the fragments to become
3705 the new origin block. */
3707 static void
3708 reorder_fix_fragments (tree block)
3710 while (block)
3712 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3713 tree new_origin = NULL_TREE;
3715 if (dup_origin)
3717 if (! TREE_ASM_WRITTEN (dup_origin))
3719 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3721 /* Find the first of the remaining fragments. There must
3722 be at least one -- the current block. */
3723 while (! TREE_ASM_WRITTEN (new_origin))
3724 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3725 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3728 else if (! dup_origin)
3729 new_origin = block;
3731 /* Re-root the rest of the fragments to the new origin. In the
3732 case that DUP_ORIGIN was null, that means BLOCK was the origin
3733 of a chain of fragments and we want to remove those fragments
3734 that didn't make it to the output. */
3735 if (new_origin)
3737 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3738 tree chain = *pp;
3740 while (chain)
3742 if (TREE_ASM_WRITTEN (chain))
3744 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3745 *pp = chain;
3746 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3748 chain = BLOCK_FRAGMENT_CHAIN (chain);
3750 *pp = NULL_TREE;
3753 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3754 block = BLOCK_CHAIN (block);
3758 /* Reverse the order of elements in the chain T of blocks,
3759 and return the new head of the chain (old last element). */
3761 tree
3762 blocks_nreverse (tree t)
3764 tree prev = 0, decl, next;
3765 for (decl = t; decl; decl = next)
3767 next = BLOCK_CHAIN (decl);
3768 BLOCK_CHAIN (decl) = prev;
3769 prev = decl;
3771 return prev;
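/* Editorial usage sketch for blocks_nreverse (hypothetical, kept out of
   the build with #if 0):  */
#if 0
static void
example_blocks_nreverse (void)
{
  tree b1 = make_node (BLOCK);
  tree b2 = make_node (BLOCK);
  tree head;

  BLOCK_CHAIN (b1) = b2;		/* Chain is b1 -> b2.  */
  head = blocks_nreverse (b1);		/* Chain is now b2 -> b1.  */
  if (head != b2 || BLOCK_CHAIN (head) != b1)
    abort ();
}
#endif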
3774 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3775 non-NULL, list them all into VECTOR, in a depth-first preorder
3776 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3777 blocks. */
3779 static int
3780 all_blocks (tree block, tree *vector)
3782 int n_blocks = 0;
3784 while (block)
3786 TREE_ASM_WRITTEN (block) = 0;
3788 /* Record this block. */
3789 if (vector)
3790 vector[n_blocks] = block;
3792 ++n_blocks;
3794 /* Record the subblocks, and their subblocks... */
3795 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3796 vector ? vector + n_blocks : 0);
3797 block = BLOCK_CHAIN (block);
3800 return n_blocks;
3803 /* Return a vector containing all the blocks rooted at BLOCK. The
3804 number of elements in the vector is stored in N_BLOCKS_P. The
3805 vector is dynamically allocated; it is the caller's responsibility
3806 to call `free' on the pointer returned. */
3808 static tree *
3809 get_block_vector (tree block, int *n_blocks_p)
3811 tree *block_vector;
3813 *n_blocks_p = all_blocks (block, NULL);
3814 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
3815 all_blocks (block, block_vector);
3817 return block_vector;
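/* Editorial usage sketch (hypothetical, kept out of the build): the two
   all_blocks calls above first count, then fill; the caller owns the
   vector and must free it.  */
#if 0
static void
example_get_block_vector (tree outer_block)
{
  int n, i;
  tree *v = get_block_vector (outer_block, &n);

  for (i = 0; i < n; ++i)
    if (TREE_CODE (v[i]) != BLOCK)	/* Depth-first preorder visit.  */
      abort ();
  free (v);
}
#endif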
3820 static GTY(()) int next_block_index = 2;
3822 /* Set BLOCK_NUMBER for all the blocks in FN. */
3824 void
3825 number_blocks (tree fn)
3827 int i;
3828 int n_blocks;
3829 tree *block_vector;
3831 /* For SDB and XCOFF debugging output, we start numbering the blocks
3832 from 1 within each function, rather than keeping a running
3833 count. */
3834 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3835 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3836 next_block_index = 1;
3837 #endif
3839 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3841 /* The top-level BLOCK isn't numbered at all. */
3842 for (i = 1; i < n_blocks; ++i)
3843 /* We number the blocks from two. */
3844 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3846 free (block_vector);
3848 return;
3851 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3853 tree
3854 debug_find_var_in_block_tree (tree var, tree block)
3856 tree t;
3858 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3859 if (t == var)
3860 return block;
3862 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3864 tree ret = debug_find_var_in_block_tree (var, t);
3865 if (ret)
3866 return ret;
3869 return NULL_TREE;
3872 /* Allocate a function structure for FNDECL and set its contents
3873 to the defaults. */
3875 void
3876 allocate_struct_function (tree fndecl)
3878 tree result;
3879 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3881 cfun = ggc_alloc_cleared (sizeof (struct function));
3883 cfun->stack_alignment_needed = STACK_BOUNDARY;
3884 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3886 current_function_funcdef_no = funcdef_no++;
3888 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3890 init_stmt_for_function ();
3891 init_eh_for_function ();
3893 lang_hooks.function.init (cfun);
3894 if (init_machine_status)
3895 cfun->machine = (*init_machine_status) ();
3897 if (fndecl == NULL)
3898 return;
3900 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3901 cfun->decl = fndecl;
3903 result = DECL_RESULT (fndecl);
3904 if (aggregate_value_p (result, fndecl))
3906 #ifdef PCC_STATIC_STRUCT_RETURN
3907 current_function_returns_pcc_struct = 1;
3908 #endif
3909 current_function_returns_struct = 1;
3912 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3914 current_function_stdarg
3915 = (fntype
3916 && TYPE_ARG_TYPES (fntype) != 0
3917 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3918 != void_type_node));
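/* Editorial note (not in the original source): the stdarg test above works
   because a prototyped fixed-arity function such as `int f (int)' has a
   TYPE_ARG_TYPES list terminated by void_type_node, whereas a varargs
   prototype such as `int f (int, ...)' does not, so its last TREE_VALUE is
   the int node and current_function_stdarg is set.  */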
3921 /* Reset cfun, and other non-struct-function variables to defaults as
3922 appropriate for emitting rtl at the start of a function. */
3924 static void
3925 prepare_function_start (tree fndecl)
3927 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3928 cfun = DECL_STRUCT_FUNCTION (fndecl);
3929 else
3930 allocate_struct_function (fndecl);
3931 init_emit ();
3932 init_varasm_status (cfun);
3933 init_expr ();
3935 cse_not_expected = ! optimize;
3937 /* Caller save not needed yet. */
3938 caller_save_needed = 0;
3940 /* We haven't done register allocation yet. */
3941 reg_renumber = 0;
3943 /* Indicate that we need to distinguish between the return value of the
3944 present function and the return value of a function being called. */
3945 rtx_equal_function_value_matters = 1;
3947 /* Indicate that we have not instantiated virtual registers yet. */
3948 virtuals_instantiated = 0;
3950 /* Indicate that we want CONCATs now. */
3951 generating_concat_p = 1;
3953 /* Indicate we have no need of a frame pointer yet. */
3954 frame_pointer_needed = 0;
3957 /* Initialize the rtl expansion mechanism so that we can do simple things
3958 like generate sequences. This is used to provide a context during global
3959 initialization of some passes. */
3960 void
3961 init_dummy_function_start (void)
3963 prepare_function_start (NULL);
3966 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3967 and initialize static variables for generating RTL for the statements
3968 of the function. */
3970 void
3971 init_function_start (tree subr)
3973 prepare_function_start (subr);
3975 /* Prevent ever trying to delete the first instruction of a
3976 function. Also tell final how to output a linenum before the
3977 function prologue. Note linenums could be missing, e.g. when
3978 compiling a Java .class file. */
3979 if (! DECL_IS_BUILTIN (subr))
3980 emit_line_note (DECL_SOURCE_LOCATION (subr));
3982 /* Make sure first insn is a note even if we don't want linenums.
3983 This makes sure the first insn will never be deleted.
3984 Also, final expects a note to appear there. */
3985 emit_note (NOTE_INSN_DELETED);
3987 /* Warn if this value is an aggregate type,
3988 regardless of which calling convention we are using for it. */
3989 if (warn_aggregate_return
3990 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3991 warning ("function returns an aggregate");
3994 /* Make sure all values used by the optimization passes have sane
3995 defaults. */
3996 void
3997 init_function_for_compilation (void)
3999 reg_renumber = 0;
4001 /* No prologue/epilogue insns yet. */
4002 VARRAY_GROW (prologue, 0);
4003 VARRAY_GROW (epilogue, 0);
4004 VARRAY_GROW (sibcall_epilogue, 0);
4007 /* Expand a call to __main at the beginning of a possible main function. */
4009 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
4010 #undef HAS_INIT_SECTION
4011 #define HAS_INIT_SECTION
4012 #endif
4014 void
4015 expand_main_function (void)
4017 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
4018 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
4020 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
4021 rtx tmp, seq;
4023 start_sequence ();
4024 /* Forcibly align the stack. */
4025 #ifdef STACK_GROWS_DOWNWARD
4026 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
4027 stack_pointer_rtx, 1, OPTAB_WIDEN);
4028 #else
4029 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
4030 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
4031 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
4032 stack_pointer_rtx, 1, OPTAB_WIDEN);
4033 #endif
4034 if (tmp != stack_pointer_rtx)
4035 emit_move_insn (stack_pointer_rtx, tmp);
4037 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
4038 tmp = force_reg (Pmode, const0_rtx);
4039 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
4040 seq = get_insns ();
4041 end_sequence ();
4043 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
4044 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
4045 break;
4046 if (tmp)
4047 emit_insn_before (seq, tmp);
4048 else
4049 emit_insn (seq);
4051 #endif
4053 #ifndef HAS_INIT_SECTION
4054 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4055 #endif
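/* Editorial worked example (not in the original source): with a 16-byte
   PREFERRED_STACK_BOUNDARY, align == 16 and the downward-growing case
   above computes sp &= -16, e.g. turning a hypothetical incoming sp of
   0x7fffa43c into 0x7fffa430.  The upward-growing case adds align - 1
   first so the same mask rounds up instead of down.  */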
4058 /* The PENDING_SIZES represent the sizes of variable-sized types.
4059 Create RTL for the various sizes now (using temporary variables),
4060 so that we can refer to the sizes from the RTL we are generating
4061 for the current function. The PENDING_SIZES are a TREE_LIST. The
4062 TREE_VALUE of each node is a SAVE_EXPR. */
4064 void
4065 expand_pending_sizes (tree pending_sizes)
4067 tree tem;
4069 /* Evaluate now the sizes of any types declared among the arguments. */
4070 for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
4072 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
4073 /* Flush the queue in case this parameter declaration has
4074 side-effects. */
4075 emit_queue ();
4079 /* Start the RTL for a new function, and set variables used for
4080 emitting RTL.
4081 SUBR is the FUNCTION_DECL node. */
4085 void
4086 expand_function_start (tree subr)
4088 /* Make sure volatile mem refs aren't considered
4089 valid operands of arithmetic insns. */
4090 init_recog_no_volatile ();
4092 current_function_profile
4093 = (profile_flag
4094 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4096 current_function_limit_stack
4097 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4099 /* Make the label for return statements to jump to. Do not special
4100 case machines with special return instructions -- they will be
4101 handled later during jump, ifcvt, or epilogue creation. */
4102 return_label = gen_label_rtx ();
4104 /* Initialize rtx used to return the value. */
4105 /* Do this before assign_parms so that we copy the struct value address
4106 before any library calls that assign parms might generate. */
4108 /* Decide whether to return the value in memory or in a register. */
4109 if (aggregate_value_p (DECL_RESULT (subr), subr))
4111 /* Returning something that won't go in a register. */
4112 rtx value_address = 0;
4114 #ifdef PCC_STATIC_STRUCT_RETURN
4115 if (current_function_returns_pcc_struct)
4117 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4118 value_address = assemble_static_space (size);
4120 else
4121 #endif
4123 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
4124 /* Expect to be passed the address of a place to store the value.
4125 If it is passed as an argument, assign_parms will take care of
4126 it. */
4127 if (sv)
4129 value_address = gen_reg_rtx (Pmode);
4130 emit_move_insn (value_address, sv);
4133 if (value_address)
4135 rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
4136 set_mem_attributes (x, DECL_RESULT (subr), 1);
4137 SET_DECL_RTL (DECL_RESULT (subr), x);
4140 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4141 /* If return mode is void, this decl rtl should not be used. */
4142 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4143 else
4145 /* Compute the return values into a pseudo reg, which we will copy
4146 into the true return register after the cleanups are done. */
4148 /* In order to figure out what mode to use for the pseudo, we
4149 figure out what the mode of the eventual return register will
4150 actually be, and use that. */
4151 rtx hard_reg
4152 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
4153 subr, 1);
4155 /* Structures that are returned in registers are not aggregate_value_p,
4156 so we may see a PARALLEL or a REG. */
4157 if (REG_P (hard_reg))
4158 SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
4159 else if (GET_CODE (hard_reg) == PARALLEL)
4160 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4161 else
4162 abort ();
4164 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4165 result to the real return register(s). */
4166 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4169 /* Initialize rtx for parameters and local variables.
4170 In some cases this requires emitting insns. */
4171 assign_parms (subr);
4173 /* If function gets a static chain arg, store it. */
4174 if (cfun->static_chain_decl)
4176 tree parm = cfun->static_chain_decl;
4177 rtx local = gen_reg_rtx (Pmode);
4179 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4180 SET_DECL_RTL (parm, local);
4181 maybe_set_unchanging (local, parm);
4182 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4184 emit_move_insn (local, static_chain_incoming_rtx);
4187 /* If the function receives a non-local goto, then store the
4188 bits we need to restore the frame pointer. */
4189 if (cfun->nonlocal_goto_save_area)
4191 tree t_save;
4192 rtx r_save;
4194 /* ??? We need to do this save early. Unfortunately, this point is
4195 before the frame variable gets declared. Help out... */
4196 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4198 t_save = build (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
4199 integer_zero_node, NULL_TREE, NULL_TREE);
4200 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4202 emit_move_insn (r_save, virtual_stack_vars_rtx);
4203 update_nonlocal_goto_save_area ();
4206 /* The following was moved from init_function_start.
4207 The move is supposed to make sdb output more accurate. */
4208 /* Indicate the beginning of the function body,
4209 as opposed to parm setup. */
4210 emit_note (NOTE_INSN_FUNCTION_BEG);
4212 if (GET_CODE (get_last_insn ()) != NOTE)
4213 emit_note (NOTE_INSN_DELETED);
4214 parm_birth_insn = get_last_insn ();
4216 if (current_function_profile)
4218 #ifdef PROFILE_HOOK
4219 PROFILE_HOOK (current_function_funcdef_no);
4220 #endif
4223 /* After the display initializations is where the tail-recursion label
4224 should go, if we end up needing one. Ensure we have a NOTE here
4225 since some things (like trampolines) get placed before this. */
4226 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4228 /* Evaluate now the sizes of any types declared among the arguments. */
4229 expand_pending_sizes (nreverse (get_pending_sizes ()));
4231 /* Make sure there is a line number after the function entry setup code. */
4232 force_next_line_note ();
4235 /* Undo the effects of init_dummy_function_start. */
4236 void
4237 expand_dummy_function_end (void)
4239 /* End any sequences that failed to be closed due to syntax errors. */
4240 while (in_sequence_p ())
4241 end_sequence ();
4243 /* Outside function body, can't compute type's actual size
4244 until next function's body starts. */
4246 free_after_parsing (cfun);
4247 free_after_compilation (cfun);
4248 cfun = 0;
4251 /* Call DOIT for each hard register used as a return value from
4252 the current function. */
4254 void
4255 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4257 rtx outgoing = current_function_return_rtx;
4259 if (! outgoing)
4260 return;
4262 if (REG_P (outgoing))
4263 (*doit) (outgoing, arg);
4264 else if (GET_CODE (outgoing) == PARALLEL)
4266 int i;
4268 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4270 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4272 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4273 (*doit) (x, arg);
4278 static void
4279 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4281 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4284 void
4285 clobber_return_register (void)
4287 diddle_return_value (do_clobber_return_reg, NULL);
4289 /* In case we do use pseudo to return value, clobber it too. */
4290 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4292 tree decl_result = DECL_RESULT (current_function_decl);
4293 rtx decl_rtl = DECL_RTL (decl_result);
4294 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4296 do_clobber_return_reg (decl_rtl, NULL);
4301 static void
4302 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4304 emit_insn (gen_rtx_USE (VOIDmode, reg));
4307 void
4308 use_return_register (void)
4310 diddle_return_value (do_use_return_reg, NULL);
4313 /* Possibly warn about unused parameters. */
4314 void
4315 do_warn_unused_parameter (tree fn)
4317 tree decl;
4319 for (decl = DECL_ARGUMENTS (fn);
4320 decl; decl = TREE_CHAIN (decl))
4321 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4322 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4323 warning ("%Junused parameter '%D'", decl, decl);
4326 static GTY(()) rtx initial_trampoline;
4328 /* Generate RTL for the end of the current function. */
4330 void
4331 expand_function_end (void)
4333 rtx clobber_after;
4335 finish_expr_for_function ();
4337 /* If arg_pointer_save_area was referenced only from a nested
4338 function, we will not have initialized it yet. Do that now. */
4339 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4340 get_arg_pointer_save_area (cfun);
4342 /* If we are doing stack checking and this function makes calls,
4343 do a stack probe at the start of the function to ensure we have enough
4344 space for another stack frame. */
4345 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4347 rtx insn, seq;
4349 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4350 if (GET_CODE (insn) == CALL_INSN)
4352 start_sequence ();
4353 probe_stack_range (STACK_CHECK_PROTECT,
4354 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4355 seq = get_insns ();
4356 end_sequence ();
4357 emit_insn_before (seq, tail_recursion_reentry);
4358 break;
4362 /* Possibly warn about unused parameters.
4363 When the front end does unit-at-a-time, the warning is already
4364 issued at finalization time. */
4365 if (warn_unused_parameter
4366 && !lang_hooks.callgraph.expand_function)
4367 do_warn_unused_parameter (current_function_decl);
4369 /* End any sequences that failed to be closed due to syntax errors. */
4370 while (in_sequence_p ())
4371 end_sequence ();
4373 clear_pending_stack_adjust ();
4374 do_pending_stack_adjust ();
4376 /* @@@ This is a kludge. We want to ensure that instructions that
4377 may trap are not moved into the epilogue by scheduling, because
4378 we don't always emit unwind information for the epilogue.
4379 However, not all machine descriptions define a blockage insn, so
4380 emit an ASM_INPUT to act as one. */
4381 if (flag_non_call_exceptions)
4382 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4384 /* Mark the end of the function body.
4385 If control reaches this insn, the function can drop through
4386 without returning a value. */
4387 emit_note (NOTE_INSN_FUNCTION_END);
4389 /* Must mark the last line number note in the function, so that the test
4390 coverage code can avoid counting the last line twice. This just tells
4391 the code to ignore the immediately following line note, since there
4392 already exists a copy of this note somewhere above. This line number
4393 note is still needed for debugging though, so we can't delete it. */
4394 if (flag_test_coverage)
4395 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4397 /* Output a line number for the end of the function.
4398 SDB depends on this. */
4399 force_next_line_note ();
4400 emit_line_note (input_location);
4402 /* Before the return label (if any), clobber the return
4403 registers so that they are not propagated live to the rest of
4404 the function. This can only happen with functions that drop
4405 through; if there had been a return statement, there would
4406 have either been a return rtx, or a jump to the return label.
4408 We delay actual code generation until after the current_function_value_rtx
4409 is computed. */
4410 clobber_after = get_last_insn ();
4412 /* Output the label for the actual return from the function,
4413 if one is expected. This happens either because a function epilogue
4414 is used instead of a return instruction, or because a return was done
4415 with a goto in order to run local cleanups, or because of pcc-style
4416 structure returning. */
4417 if (return_label)
4418 emit_label (return_label);
4420 /* Let except.c know where it should emit the call to unregister
4421 the function context for sjlj exceptions. */
4422 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4423 sjlj_emit_function_exit_after (get_last_insn ());
4425 /* If we had calls to alloca, and this machine needs
4426 an accurate stack pointer to exit the function,
4427 insert some code to save and restore the stack pointer. */
4428 if (! EXIT_IGNORE_STACK
4429 && current_function_calls_alloca)
4431 rtx tem = 0;
4433 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4434 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4437 /* If scalar return value was computed in a pseudo-reg, or was a named
4438 return value that got dumped to the stack, copy that to the hard
4439 return register. */
4440 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4442 tree decl_result = DECL_RESULT (current_function_decl);
4443 rtx decl_rtl = DECL_RTL (decl_result);
4445 if (REG_P (decl_rtl)
4446 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4447 : DECL_REGISTER (decl_result))
4449 rtx real_decl_rtl = current_function_return_rtx;
4451 /* This should be set in assign_parms. */
4452 if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
4453 abort ();
4455 /* If this is a BLKmode structure being returned in registers,
4456 then use the mode computed in expand_return. Note that if
4457 decl_rtl is memory, then its mode may have been changed,
4458 but that current_function_return_rtx has not. */
4459 if (GET_MODE (real_decl_rtl) == BLKmode)
4460 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4462 /* If a named return value dumped decl_return to memory, then
4463 we may need to re-do the PROMOTE_MODE signed/unsigned
4464 extension. */
4465 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4467 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4469 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4470 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4471 &unsignedp, 1);
4473 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4475 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4477 /* If expand_function_start has created a PARALLEL for decl_rtl,
4478 move the result to the real return registers. Otherwise, do
4479 a group load from decl_rtl for a named return. */
4480 if (GET_CODE (decl_rtl) == PARALLEL)
4481 emit_group_move (real_decl_rtl, decl_rtl);
4482 else
4483 emit_group_load (real_decl_rtl, decl_rtl,
4484 TREE_TYPE (decl_result),
4485 int_size_in_bytes (TREE_TYPE (decl_result)));
4487 else
4488 emit_move_insn (real_decl_rtl, decl_rtl);
4492 /* If returning a structure, arrange to return the address of the value
4493 in a place where debuggers expect to find it.
4495 If returning a structure PCC style,
4496 the caller also depends on this value.
4497 And current_function_returns_pcc_struct is not necessarily set. */
4498 if (current_function_returns_struct
4499 || current_function_returns_pcc_struct)
4501 rtx value_address
4502 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4503 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4504 #ifdef FUNCTION_OUTGOING_VALUE
4505 rtx outgoing
4506 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4507 current_function_decl);
4508 #else
4509 rtx outgoing
4510 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
4511 #endif
4513 /* Mark this as a function return value so integrate will delete the
4514 assignment and USE below when inlining this function. */
4515 REG_FUNCTION_VALUE_P (outgoing) = 1;
4517 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4518 value_address = convert_memory_address (GET_MODE (outgoing),
4519 value_address);
4521 emit_move_insn (outgoing, value_address);
4523 /* Show return register used to hold result (in this case the address
4524 of the result). */
4525 current_function_return_rtx = outgoing;
4528 /* If this is an implementation of throw, do what's necessary to
4529 communicate between __builtin_eh_return and the epilogue. */
4530 expand_eh_return ();
4532 /* Emit the actual code to clobber return register. */
4534 rtx seq, after;
4536 start_sequence ();
4537 clobber_return_register ();
4538 seq = get_insns ();
4539 end_sequence ();
4541 after = emit_insn_after (seq, clobber_after);
4544 /* Output the label for the naked return from the function, if one is
4545 expected. This is currently used only by __builtin_return. */
4546 if (naked_return_label)
4547 emit_label (naked_return_label);
4549 /* ??? This should no longer be necessary since stupid is no longer with
4550 us, but there are some parts of the compiler (e.g. reload_combine, and
4551 sh mach_dep_reorg) that still try to compute their own lifetime info
4552 instead of using the general framework. */
4553 use_return_register ();
4555 /* Fix up any gotos that jumped out to the outermost
4556 binding level of the function.
4557 Must follow emitting RETURN_LABEL. */
4559 /* If you have any cleanups to do at this point,
4560 and they need to create temporary variables,
4561 then you will lose. */
4562 expand_fixups (get_insns ());
4565 rtx
4566 get_arg_pointer_save_area (struct function *f)
4568 rtx ret = f->x_arg_pointer_save_area;
4570 if (! ret)
4572 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4573 f->x_arg_pointer_save_area = ret;
4576 if (f == cfun && ! f->arg_pointer_save_area_init)
4578 rtx seq;
4580 /* Save the arg pointer at the beginning of the function. The
4581 generated stack slot may not be a valid memory address, so we
4582 have to check it and fix it if necessary. */
4583 start_sequence ();
4584 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4585 seq = get_insns ();
4586 end_sequence ();
4588 push_topmost_sequence ();
4589 emit_insn_after (seq, get_insns ());
4590 pop_topmost_sequence ();
4593 return ret;
4596 /* Extend a vector that records the INSN_UIDs of INSNS
4597 (a list of one or more insns). */
4599 static void
4600 record_insns (rtx insns, varray_type *vecp)
4602 int i, len;
4603 rtx tmp;
4605 tmp = insns;
4606 len = 0;
4607 while (tmp != NULL_RTX)
4609 len++;
4610 tmp = NEXT_INSN (tmp);
4613 i = VARRAY_SIZE (*vecp);
4614 VARRAY_GROW (*vecp, i + len);
4615 tmp = insns;
4616 while (tmp != NULL_RTX)
4618 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
4619 i++;
4620 tmp = NEXT_INSN (tmp);
4624 /* Set the locator of the insn chain starting at INSN to LOC. */
4625 static void
4626 set_insn_locators (rtx insn, int loc)
4628 while (insn != NULL_RTX)
4630 if (INSN_P (insn))
4631 INSN_LOCATOR (insn) = loc;
4632 insn = NEXT_INSN (insn);
4636 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4637 be running after reorg, SEQUENCE rtl is possible. */
4639 static int
4640 contains (rtx insn, varray_type vec)
4642 int i, j;
4644 if (GET_CODE (insn) == INSN
4645 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4647 int count = 0;
4648 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4649 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4650 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
4651 count++;
4652 return count;
4654 else
4656 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4657 if (INSN_UID (insn) == VARRAY_INT (vec, j))
4658 return 1;
4660 return 0;
4663 int
4664 prologue_epilogue_contains (rtx insn)
4666 if (contains (insn, prologue))
4667 return 1;
4668 if (contains (insn, epilogue))
4669 return 1;
4670 return 0;
4673 int
4674 sibcall_epilogue_contains (rtx insn)
4676 if (sibcall_epilogue)
4677 return contains (insn, sibcall_epilogue);
4678 return 0;
4681 #ifdef HAVE_return
4682 /* Insert gen_return at the end of block BB. This also means updating
4683 block_for_insn appropriately. */
4685 static void
4686 emit_return_into_block (basic_block bb, rtx line_note)
4688 emit_jump_insn_after (gen_return (), BB_END (bb));
4689 if (line_note)
4690 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4692 #endif /* HAVE_return */
4694 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4696 /* These functions convert the epilogue into a variant that does not modify the
4697 stack pointer. This is used in cases where a function returns an object
4698 whose size is not known until it is computed. The called function leaves the
4699 object on the stack, leaves the stack depressed, and returns a pointer to
4700 the object.
4702 What we need to do is track all modifications and references to the stack
4703 pointer, deleting the modifications and changing the references to point to
4704 the location the stack pointer would have pointed to had the modifications
4705 taken place.
4707 These functions need to be portable so we need to make as few assumptions
4708 about the epilogue as we can. However, the epilogue basically contains
4709 three things: instructions to reset the stack pointer, instructions to
4710 reload registers, possibly including the frame pointer, and an
4711 instruction to return to the caller.
4713 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4714 We also make no attempt to validate the insns we make since if they are
4715 invalid, we probably can't do anything valid. The intent is that these
4716 routines get "smarter" as more and more machines start to use them and
4717 they try operating on different epilogues.
4719 We use the following structure to track what the part of the epilogue that
4720 we've already processed has done. We keep two copies of the SP equivalence,
4721 one for use during the insn we are processing and one for use in the next
4722 insn. The difference is because one part of a PARALLEL may adjust SP
4723 and the other may use it. */
4725 struct epi_info
4727 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4728 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4729 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4730 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4731 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4732 should be set to once we no longer need
4733 its value. */
4734 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4735 for registers. */
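/* Editorial worked example (not in the original source): after processing
   an epilogue insn (set (reg sp) (plus (reg fp) (const_int 8))), we have
   sp_equiv_reg == fp and sp_offset == 8; a later use of
   (plus (reg sp) (const_int 4)) is then, in effect, rewritten as
   (plus (reg fp) (const_int 12)).  */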
4738 static void handle_epilogue_set (rtx, struct epi_info *);
4739 static void update_epilogue_consts (rtx, rtx, void *);
4740 static void emit_equiv_load (struct epi_info *);
4742 /* Modify INSN, a list of one or more insns that is part of the epilogue, to
4743 make no modifications to the stack pointer. Return the new list of insns. */
4745 static rtx
4746 keep_stack_depressed (rtx insns)
4748 int j;
4749 struct epi_info info;
4750 rtx insn, next;
4752 /* If the epilogue is just a single instruction, it must be OK as is. */
4753 if (NEXT_INSN (insns) == NULL_RTX)
4754 return insns;
4756 /* Otherwise, start a sequence, initialize the information we have, and
4757 process all the insns we were given. */
4758 start_sequence ();
4760 info.sp_equiv_reg = stack_pointer_rtx;
4761 info.sp_offset = 0;
4762 info.equiv_reg_src = 0;
4764 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4765 info.const_equiv[j] = 0;
4767 insn = insns;
4768 next = NULL_RTX;
4769 while (insn != NULL_RTX)
4771 next = NEXT_INSN (insn);
4773 if (!INSN_P (insn))
4775 add_insn (insn);
4776 insn = next;
4777 continue;
4780 /* If this insn references the register that SP is equivalent to and
4781 we have a pending load to that register, we must force out the load
4782 first and then indicate we no longer know what SP's equivalent is. */
4783 if (info.equiv_reg_src != 0
4784 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4786 emit_equiv_load (&info);
4787 info.sp_equiv_reg = 0;
4790 info.new_sp_equiv_reg = info.sp_equiv_reg;
4791 info.new_sp_offset = info.sp_offset;
4793 /* If this is a (RETURN) and the return address is on the stack,
4794 update the address and change to an indirect jump. */
4795 if (GET_CODE (PATTERN (insn)) == RETURN
4796 || (GET_CODE (PATTERN (insn)) == PARALLEL
4797 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4799 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4800 rtx base = 0;
4801 HOST_WIDE_INT offset = 0;
4802 rtx jump_insn, jump_set;
4804 /* If the return address is in a register, we can emit the insn
4805 unchanged. Otherwise, it must be a MEM and we see what the
4806 base register and offset are. In any case, we have to emit any
4807 pending load to the equivalent reg of SP, if any. */
4808 if (REG_P (retaddr))
4810 emit_equiv_load (&info);
4811 add_insn (insn);
4812 insn = next;
4813 continue;
4815 else if (MEM_P (retaddr)
4816 && REG_P (XEXP (retaddr, 0)))
4817 base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
4818 else if (MEM_P (retaddr)
4819 && GET_CODE (XEXP (retaddr, 0)) == PLUS
4820 && REG_P (XEXP (XEXP (retaddr, 0), 0))
4821 && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
4823 base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
4824 offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
4826 else
4827 abort ();
4829 /* If the base of the location containing the return pointer
4830 is SP, we must update it with the replacement address. Otherwise,
4831 just build the necessary MEM. */
4832 retaddr = plus_constant (base, offset);
4833 if (base == stack_pointer_rtx)
4834 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4835 plus_constant (info.sp_equiv_reg,
4836 info.sp_offset));
4838 retaddr = gen_rtx_MEM (Pmode, retaddr);
4840 /* If there is a pending load to the equivalent register for SP
4841 and we reference that register, we must load our address into
4842 a scratch register and then do that load. */
4843 if (info.equiv_reg_src
4844 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4846 unsigned int regno;
4847 rtx reg;
4849 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4850 if (HARD_REGNO_MODE_OK (regno, Pmode)
4851 && !fixed_regs[regno]
4852 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4853 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
4854 regno)
4855 && !refers_to_regno_p (regno,
4856 regno + hard_regno_nregs[regno]
4857 [Pmode],
4858 info.equiv_reg_src, NULL)
4859 && info.const_equiv[regno] == 0)
4860 break;
4862 if (regno == FIRST_PSEUDO_REGISTER)
4863 abort ();
4865 reg = gen_rtx_REG (Pmode, regno);
4866 emit_move_insn (reg, retaddr);
4867 retaddr = reg;
4870 emit_equiv_load (&info);
4871 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4873 /* Show the SET in the above insn is a RETURN. */
4874 jump_set = single_set (jump_insn);
4875 if (jump_set == 0)
4876 abort ();
4877 else
4878 SET_IS_RETURN_P (jump_set) = 1;
4881 /* If SP is not mentioned in the pattern and its equivalent register, if
4882 any, is not modified, just emit it. Otherwise, if neither is set,
4883 replace the reference to SP and emit the insn. If none of those are
4884 true, handle each SET individually. */
4885 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4886 && (info.sp_equiv_reg == stack_pointer_rtx
4887 || !reg_set_p (info.sp_equiv_reg, insn)))
4888 add_insn (insn);
4889 else if (! reg_set_p (stack_pointer_rtx, insn)
4890 && (info.sp_equiv_reg == stack_pointer_rtx
4891 || !reg_set_p (info.sp_equiv_reg, insn)))
4893 if (! validate_replace_rtx (stack_pointer_rtx,
4894 plus_constant (info.sp_equiv_reg,
4895 info.sp_offset),
4896 insn))
4897 abort ();
4899 add_insn (insn);
4901 else if (GET_CODE (PATTERN (insn)) == SET)
4902 handle_epilogue_set (PATTERN (insn), &info);
4903 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4905 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4906 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4907 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4909 else
4910 add_insn (insn);
4912 info.sp_equiv_reg = info.new_sp_equiv_reg;
4913 info.sp_offset = info.new_sp_offset;
4915 /* Now update any constants this insn sets. */
4916 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4917 insn = next;
4920 insns = get_insns ();
4921 end_sequence ();
4922 return insns;
4925 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4926 structure that contains information about what we've seen so far. We
4927 process this SET by either updating that data or by emitting one or
4928 more insns. */
4930 static void
4931 handle_epilogue_set (rtx set, struct epi_info *p)
4933 /* First handle the case where we are setting SP. Record what it is being
4934 set from. If unknown, abort. */
4935 if (reg_set_p (stack_pointer_rtx, set))
4937 if (SET_DEST (set) != stack_pointer_rtx)
4938 abort ();
4940 if (GET_CODE (SET_SRC (set)) == PLUS)
4942 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4943 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4944 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4945 else if (REG_P (XEXP (SET_SRC (set), 1))
4946 && REGNO (XEXP (SET_SRC (set), 1)) < FIRST_PSEUDO_REGISTER
4947 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))] != 0)
4948 p->new_sp_offset
4949 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4950 else
4951 abort ();
4953 else
4954 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4956 /* If we are adjusting SP, we adjust from the old data. */
4957 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4959 p->new_sp_equiv_reg = p->sp_equiv_reg;
4960 p->new_sp_offset += p->sp_offset;
4963 if (p->new_sp_equiv_reg == 0 || !REG_P (p->new_sp_equiv_reg))
4964 abort ();
4966 return;
4969 /* Next handle the case where we are setting SP's equivalent register.
4970 If we already have a value to set it to, abort. We could update, but
4971 there seems little point in handling that case. Note that we have
4972 to allow for the case where we are setting the register set in
4973 the previous part of a PARALLEL inside a single insn. But use the
4974 old offset for any updates within this insn. We must allow for the case
4975 where the register is being set in a different (usually wider) mode than
4976 Pmode. */
4977 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4979 if (p->equiv_reg_src != 0
4980 || !REG_P (p->new_sp_equiv_reg)
4981 || !REG_P (SET_DEST (set))
4982 || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) > BITS_PER_WORD
4983 || REGNO (p->new_sp_equiv_reg) != REGNO (SET_DEST (set)))
4984 abort ();
4985 else
4986 p->equiv_reg_src
4987 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4988 plus_constant (p->sp_equiv_reg,
4989 p->sp_offset));
4992 /* Otherwise, replace any references to SP in the insn to its new value
4993 and emit the insn. */
4994 else
4996 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4997 plus_constant (p->sp_equiv_reg,
4998 p->sp_offset));
4999 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5000 plus_constant (p->sp_equiv_reg,
5001 p->sp_offset));
5002 emit_insn (set);
5006 /* Update the tracking information for registers set to constants. */
5008 static void
5009 update_epilogue_consts (rtx dest, rtx x, void *data)
5011 struct epi_info *p = (struct epi_info *) data;
5012 rtx new;
5014 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5015 return;
5017 /* If we are either clobbering a register or doing a partial set,
5018 show we don't know the value. */
5019 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5020 p->const_equiv[REGNO (dest)] = 0;
5022 /* If we are setting it to a constant, record that constant. */
5023 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5024 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5026 /* If this is a binary operation between a register we have been tracking
5027 and a constant, see if we can compute a new constant value. */
5028 else if (ARITHMETIC_P (SET_SRC (x))
5029 && REG_P (XEXP (SET_SRC (x), 0))
5030 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5031 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5032 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5033 && 0 != (new = simplify_binary_operation
5034 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5035 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5036 XEXP (SET_SRC (x), 1)))
5037 && GET_CODE (new) == CONST_INT)
5038 p->const_equiv[REGNO (dest)] = new;
5040 /* Otherwise, we can't do anything with this value. */
5041 else
5042 p->const_equiv[REGNO (dest)] = 0;
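/* Editorial worked example (not in the original source, register number
   hypothetical): after (set (reg 3) (const_int 40)), const_equiv for reg 3
   is 40; a following (set (reg 3) (plus (reg 3) (const_int -16))) folds
   through simplify_binary_operation to record 24; a later CLOBBER of
   reg 3 resets the entry to 0.  */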
5045 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5047 static void
5048 emit_equiv_load (struct epi_info *p)
5050 if (p->equiv_reg_src != 0)
5052 rtx dest = p->sp_equiv_reg;
5054 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5055 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5056 REGNO (p->sp_equiv_reg));
5058 emit_move_insn (dest, p->equiv_reg_src);
5059 p->equiv_reg_src = 0;
5062 #endif
5064 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5065 this into place with notes indicating where the prologue ends and where
5066 the epilogue begins. Update the basic block information when possible. */
5068 void
5069 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5071 int inserted = 0;
5072 edge e;
5073 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5074 rtx seq;
5075 #endif
5076 #ifdef HAVE_prologue
5077 rtx prologue_end = NULL_RTX;
5078 #endif
5079 #if defined (HAVE_epilogue) || defined(HAVE_return)
5080 rtx epilogue_end = NULL_RTX;
5081 #endif
5083 #ifdef HAVE_prologue
5084 if (HAVE_prologue)
5086 start_sequence ();
5087 seq = gen_prologue ();
5088 emit_insn (seq);
5090 /* Retain a map of the prologue insns. */
5091 record_insns (seq, &prologue);
5092 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5094 seq = get_insns ();
5095 end_sequence ();
5096 set_insn_locators (seq, prologue_locator);
5098 /* Can't deal with multiple successors of the entry block
5099 at the moment. Function should always have at least one
5100 entry point. */
5101 if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
5102 abort ();
5104 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
5105 inserted = 1;
5107 #endif
5109 /* If the exit block has no non-fake predecessors, we don't need
5110 an epilogue. */
5111 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5112 if ((e->flags & EDGE_FAKE) == 0)
5113 break;
5114 if (e == NULL)
5115 goto epilogue_done;
5117 #ifdef HAVE_return
5118 if (optimize && HAVE_return)
5120 /* If we're allowed to generate a simple return instruction,
5121 then by definition we don't need a full epilogue. Examine
5122 the block that falls through to EXIT. If it does not
5123 contain any code, examine its predecessors and try to
5124 emit (conditional) return instructions. */
5126 basic_block last;
5127 edge e_next;
5128 rtx label;
5130 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5131 if (e->flags & EDGE_FALLTHRU)
5132 break;
5133 if (e == NULL)
5134 goto epilogue_done;
5135 last = e->src;
5137 /* Verify that there are no active instructions in the last block. */
5138 label = BB_END (last);
5139 while (label && GET_CODE (label) != CODE_LABEL)
5141 if (active_insn_p (label))
5142 break;
5143 label = PREV_INSN (label);
5146 if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
5148 rtx epilogue_line_note = NULL_RTX;
5150 /* Locate the line number associated with the closing brace,
5151 if we can find one. */
5152 for (seq = get_last_insn ();
5153 seq && ! active_insn_p (seq);
5154 seq = PREV_INSN (seq))
5155 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
5157 epilogue_line_note = seq;
5158 break;
5161 for (e = last->pred; e; e = e_next)
5163 basic_block bb = e->src;
5164 rtx jump;
5166 e_next = e->pred_next;
5167 if (bb == ENTRY_BLOCK_PTR)
5168 continue;
5170 jump = BB_END (bb);
5171 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
5172 continue;
5174 /* If we have an unconditional jump, we can replace that
5175 with a simple return instruction. */
5176 if (simplejump_p (jump))
5178 emit_return_into_block (bb, epilogue_line_note);
5179 delete_insn (jump);
5182 /* If we have a conditional jump, we can try to replace
5183 that with a conditional return instruction. */
5184 else if (condjump_p (jump))
5186 if (! redirect_jump (jump, 0, 0))
5187 continue;
5189 /* If this block has only one successor, it both jumps
5190 and falls through to the fallthru block, so we can't
5191 delete the edge. */
5192 if (bb->succ->succ_next == NULL)
5193 continue;
5195 else
5196 continue;
5198 /* Fix up the CFG for the successful change we just made. */
5199 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5202 /* Emit a return insn for the exit fallthru block. Whether
5203 this is still reachable will be determined later. */
5205 emit_barrier_after (BB_END (last));
5206 emit_return_into_block (last, epilogue_line_note);
5207 epilogue_end = BB_END (last);
5208 last->succ->flags &= ~EDGE_FALLTHRU;
5209 goto epilogue_done;
5212 #endif
5213 /* Find the edge that falls through to EXIT. Other edges may exist
5214 due to RETURN instructions, but those don't need epilogues.
5215 There really shouldn't be a mixture -- either all should have
5216 been converted or none, however... */
5218 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5219 if (e->flags & EDGE_FALLTHRU)
5220 break;
5221 if (e == NULL)
5222 goto epilogue_done;
5224 #ifdef HAVE_epilogue
5225 if (HAVE_epilogue)
5227 start_sequence ();
5228 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5230 seq = gen_epilogue ();
5232 #ifdef INCOMING_RETURN_ADDR_RTX
5233 /* If this function returns with the stack depressed and we can support
5234 it, massage the epilogue to actually do that. */
5235 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5236 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5237 seq = keep_stack_depressed (seq);
5238 #endif
5240 emit_jump_insn (seq);
5242 /* Retain a map of the epilogue insns. */
5243 record_insns (seq, &epilogue);
5244 set_insn_locators (seq, epilogue_locator);
5246 seq = get_insns ();
5247 end_sequence ();
5249 insert_insn_on_edge (seq, e);
5250 inserted = 1;
5252 else
5253 #endif
5255 basic_block cur_bb;
5257 if (! next_active_insn (BB_END (e->src)))
5258 goto epilogue_done;
5259 /* We have a fall-through edge to the exit block, the source is not
5260 at the end of the function, and there will be an assembler epilogue
5261 at the end of the function.
5262 We can't use force_nonfallthru here, because that would try to
5263 use return. Inserting a jump 'by hand' is extremely messy, so
5264 we take advantage of cfg_layout_finalize using
5265 fixup_fallthru_exit_predecessor. */
5266 cfg_layout_initialize ();
5267 FOR_EACH_BB (cur_bb)
5268 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5269 cur_bb->rbi->next = cur_bb->next_bb;
5270 cfg_layout_finalize ();
5272 epilogue_done:
5274 if (inserted)
5275 commit_edge_insertions ();
5277 #ifdef HAVE_sibcall_epilogue
5278 /* Emit sibling epilogues before any sibling call sites. */
5279 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5281 basic_block bb = e->src;
5282 rtx insn = BB_END (bb);
5283 rtx i;
5284 rtx newinsn;
5286 if (GET_CODE (insn) != CALL_INSN
5287 || ! SIBLING_CALL_P (insn))
5288 continue;
5290 start_sequence ();
5291 emit_insn (gen_sibcall_epilogue ());
5292 seq = get_insns ();
5293 end_sequence ();
5295 /* Retain a map of the epilogue insns. Used in life analysis to
5296 avoid getting rid of sibcall epilogue insns. Do this before we
5297 actually emit the sequence. */
5298 record_insns (seq, &sibcall_epilogue);
5299 set_insn_locators (seq, epilogue_locator);
5301 i = PREV_INSN (insn);
5302 newinsn = emit_insn_before (seq, insn);
5304 #endif
5306 #ifdef HAVE_prologue
5307 /* This is probably all useless now that we use locators. */
5308 if (prologue_end)
5310 rtx insn, prev;
5312 /* GDB handles `break f' by setting a breakpoint on the first
5313 line note after the prologue. Which means (1) that if
5314 there are line number notes before where we inserted the
5315 prologue we should move them, and (2) we should generate a
5316 note before the end of the first basic block, if there isn't
5317 one already there.
5319 ??? This behavior is completely broken when dealing with
5320 multiple entry functions. We simply always place the note
5321 into the first basic block and let alternate entry points
5322 be missed.
5323 */
5325 for (insn = prologue_end; insn; insn = prev)
5327 prev = PREV_INSN (insn);
5328 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
5330 /* Note that we cannot reorder the first insn in the
5331 chain, since rest_of_compilation relies on that
5332 remaining constant. */
5333 if (prev == NULL)
5334 break;
5335 reorder_insns (insn, insn, prologue_end);
5339 /* Find the last line number note in the first block. */
5340 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5341 insn != prologue_end && insn;
5342 insn = PREV_INSN (insn))
5343 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
5344 break;
5346 /* If we didn't find one, make a copy of the first line number
5347 we run across. */
5348 if (! insn)
5350 for (insn = next_active_insn (prologue_end);
5351 insn;
5352 insn = PREV_INSN (insn))
5353 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
5355 emit_note_copy_after (insn, prologue_end);
5356 break;
5360 #endif
5361 #ifdef HAVE_epilogue
5362 if (epilogue_end)
5364 rtx insn, next;
5366 /* Similarly, move any line notes that appear after the epilogue.
5367 There is no need, however, to be quite so anal about the existence
5368 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5369 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5370 info generation. */
5371 for (insn = epilogue_end; insn; insn = next)
5373 next = NEXT_INSN (insn);
5374 if (GET_CODE (insn) == NOTE
5375 && (NOTE_LINE_NUMBER (insn) > 0
5376 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5377 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5378 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5381 #endif
5384 /* Reposition the prologue-end and epilogue-begin notes after instruction
5385 scheduling and delayed branch scheduling. */
5387 void
5388 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5390 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5391 rtx insn, last, note;
5392 int len;
5394 if ((len = VARRAY_SIZE (prologue)) > 0)
5396 last = 0, note = 0;
5398 /* Scan from the beginning until we reach the last prologue insn.
5399 We apparently can't depend on basic_block_{head,end} after
5400 reorg has run. */
5401 for (insn = f; insn; insn = NEXT_INSN (insn))
5403 if (GET_CODE (insn) == NOTE)
5405 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5406 note = insn;
5408 else if (contains (insn, prologue))
5410 last = insn;
5411 if (--len == 0)
5412 break;
5416 if (last)
5418 /* Find the prologue-end note if we haven't already, and
5419 move it to just after the last prologue insn. */
5420 if (note == 0)
5422 for (note = last; (note = NEXT_INSN (note));)
5423 if (GET_CODE (note) == NOTE
5424 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5425 break;
5428 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5429 if (GET_CODE (last) == CODE_LABEL)
5430 last = NEXT_INSN (last);
5431 reorder_insns (note, note, last);
5435 if ((len = VARRAY_SIZE (epilogue)) > 0)
5437 last = 0, note = 0;
5439 /* Scan from the end until we reach the first epilogue insn.
5440 We apparently can't depend on basic_block_{head,end} after
5441 reorg has run. */
5442 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5444 if (GET_CODE (insn) == NOTE)
5446 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5447 note = insn;
5449 else if (contains (insn, epilogue))
5451 last = insn;
5452 if (--len == 0)
5453 break;
5457 if (last)
5459 /* Find the epilogue-begin note if we haven't already, and
5460 move it to just before the first epilogue insn. */
5461 if (note == 0)
5463 for (note = insn; (note = PREV_INSN (note));)
5464 if (GET_CODE (note) == NOTE
5465 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5466 break;
5469 if (PREV_INSN (last) != note)
5470 reorder_insns (note, note, PREV_INSN (last));
5473 #endif /* HAVE_prologue or HAVE_epilogue */
5476 /* Called once, at initialization, to initialize function.c. */
5478 void
5479 init_function_once (void)
5481 VARRAY_INT_INIT (prologue, 0, "prologue");
5482 VARRAY_INT_INIT (epilogue, 0, "epilogue");
5483 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
5488 /* Resets the ib_boundaries_block array. */
5488 void
5489 reset_block_changes (void)
5491 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5492 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5495 /* Record the boundary for BLOCK. */
5496 void
5497 record_block_change (tree block)
5499 int i, n;
5500 tree last_block;
5502 if (!block)
5503 return;
5505 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5506 VARRAY_POP (cfun->ib_boundaries_block);
5507 n = get_max_uid ();
5508 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5509 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5511 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5514 /* Finishes record of boundaries. */
5515 void finalize_block_changes (void)
5517 record_block_change (DECL_INITIAL (current_function_decl));
5520 /* For INSN, store in *BLOCK the BLOCK it belongs to. */
5521 void
5522 check_block_change (rtx insn, tree *block)
5524 unsigned uid = INSN_UID (insn);
5526 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5527 return;
5529 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
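/* Editorial usage sketch (not in the original source): the intended calling
   sequence is reset_block_changes () once per function, then
   record_block_change (block) at each block transition while emitting,
   finalize_block_changes () at the end; afterward
   check_block_change (insn, &block) maps an insn's uid back to the BLOCK
   that was current when the insn was emitted.  */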
5532 /* Releases the ib_boundaries_block records. */
5533 void
5534 free_block_changes (void)
5536 cfun->ib_boundaries_block = NULL;
5539 /* Returns the name of the current function. */
5540 const char *
5541 current_function_name (void)
5543 return lang_hooks.decl_printable_name (cfun->decl, 2);
5546 #include "gt-function.h"