1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register.
37 Call `put_var_into_stack' when you learn, belatedly, that a variable
38 previously given a pseudo-register must in fact go in the stack.
39 This function changes the DECL_RTL to be a stack slot instead of a reg,
40 then scans all the RTL instructions so far generated to correct them. */
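/* Illustrative sketch only (not part of the original source): the rough
   order in which a front end uses the entry points named above.  Argument
   lists are elided; see the definitions in this file for the real
   signatures.  */
#if 0
   expand_function_start (...);   /* before the function body is parsed */
   /* ... expand the body, calling assign_stack_local () for stack slots
      and put_var_into_stack () for variables that belatedly need to live
      in memory ... */
   expand_function_end (...);     /* after the body has been parsed */
#endif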
42 #include "config.h"
43 #include "system.h"
44 #include "rtl.h"
45 #include "tree.h"
46 #include "flags.h"
47 #include "except.h"
48 #include "function.h"
49 #include "insn-flags.h"
50 #include "expr.h"
51 #include "insn-codes.h"
52 #include "regs.h"
53 #include "hard-reg-set.h"
54 #include "insn-config.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "basic-block.h"
58 #include "obstack.h"
59 #include "toplev.h"
60 #include "hash.h"
61 #include "ggc.h"
62 #include "tm_p.h"
64 #ifndef TRAMPOLINE_ALIGNMENT
65 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
66 #endif
68 #ifndef LOCAL_ALIGNMENT
69 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
70 #endif
72 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
73 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
74 #endif
76 /* Some systems use __main in a way incompatible with its use in gcc; in these
77 cases, use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
78 give the same symbol without quotes for an alternative entry point. You
79 must define both, or neither. */
80 #ifndef NAME__MAIN
81 #define NAME__MAIN "__main"
82 #define SYMBOL__MAIN __main
83 #endif
85 /* Round a value down to the largest multiple of the required alignment
86 that does not exceed it.  Avoid using division in case the value is
87 negative.  Assume the alignment is a power of two. */
88 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
90 /* Similar, but round up to the nearest multiple of the
91 alignment. */
92 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
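/* Example (illustrative only, not part of the original source): with
   ALIGN == 8, CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16, while
   FLOOR_ROUND (-13, 8) == -13 & ~7 == -16, so both macros behave
   correctly for negative frame offsets without using division.  */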
94 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
95 during rtl generation. If they are different register numbers, this is
96 always true. It may also be true if
97 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
98 generation. See fix_lexical_addr for details. */
100 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
101 #define NEED_SEPARATE_AP
102 #endif
104 /* Nonzero if function being compiled doesn't contain any calls
105 (ignoring the prologue and epilogue). This is set prior to
106 local register allocation and is valid for the remaining
107 compiler passes. */
108 int current_function_is_leaf;
110 /* Nonzero if function being compiled doesn't modify the stack pointer
111 (ignoring the prologue and epilogue). This is only valid after
112 life_analysis has run. */
113 int current_function_sp_is_unchanging;
115 /* Nonzero if the function being compiled is a leaf function which only
116 uses leaf registers. This is valid after reload (specifically after
117 sched2) and is useful only if the port defines LEAF_REGISTERS. */
118 int current_function_uses_only_leaf_regs;
120 /* Nonzero once virtual register instantiation has been done.
121 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
122 static int virtuals_instantiated;
124 /* These variables hold pointers to functions to
125 save and restore machine-specific data,
126 in push_function_context and pop_function_context. */
127 void (*init_machine_status) PARAMS ((struct function *));
128 void (*save_machine_status) PARAMS ((struct function *));
129 void (*restore_machine_status) PARAMS ((struct function *));
130 void (*mark_machine_status) PARAMS ((struct function *));
131 void (*free_machine_status) PARAMS ((struct function *));
133 /* Likewise, but for language-specific data. */
134 void (*init_lang_status) PARAMS ((struct function *));
135 void (*save_lang_status) PARAMS ((struct function *));
136 void (*restore_lang_status) PARAMS ((struct function *));
137 void (*mark_lang_status) PARAMS ((struct function *));
138 void (*free_lang_status) PARAMS ((struct function *));
140 /* The FUNCTION_DECL for an inline function currently being expanded. */
141 tree inline_function_decl;
143 /* The currently compiled function. */
144 struct function *cfun = 0;
146 /* Global list of all compiled functions. */
147 struct function *all_functions = 0;
149 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
150 static int *prologue;
151 static int *epilogue;
153 /* In order to evaluate some expressions, such as function calls returning
154 structures in memory, we need to temporarily allocate stack locations.
155 We record each allocated temporary in the following structure.
157 Associated with each temporary slot is a nesting level. When we pop up
158 one level, all temporaries associated with the previous level are freed.
159 Normally, all temporaries are freed after the execution of the statement
160 in which they were created. However, if we are inside a ({...}) grouping,
161 the result may be in a temporary and hence must be preserved. If the
162 result could be in a temporary, we preserve it if we can determine which
163 one it is in. If we cannot determine which temporary may contain the
164 result, all temporaries are preserved. A temporary is preserved by
165 pretending it was allocated at the previous nesting level.
167 Automatic variables are also assigned temporary slots, at the nesting
168 level where they are defined. They are marked as "kept" so that
169 free_temp_slots will not free them. */
171 struct temp_slot
173 /* Points to next temporary slot. */
174 struct temp_slot *next;
175 /* The rtx used to reference the slot. */
176 rtx slot;
177 /* The rtx used to represent the address if not the address of the
178 slot above. May be an EXPR_LIST if multiple addresses exist. */
179 rtx address;
180 /* The alignment (in bits) of the slot. */
181 int align;
182 /* The size, in units, of the slot. */
183 HOST_WIDE_INT size;
184 /* The alias set for the slot. If the alias set is zero, we don't
185 know anything about the alias set of the slot. We must only
186 reuse a slot if it is assigned an object of the same alias set.
187 Otherwise, the rest of the compiler may assume that the new use
188 of the slot cannot alias the old use of the slot, which is
189 false. If the slot has alias set zero, then we can't reuse the
190 slot at all, since we have no idea what alias set may have been
191 imposed on the memory. For example, if the stack slot is the
192 call frame for an inlined function, we have no idea what alias
193 sets will be assigned to various pieces of the call frame. */
194 int alias_set;
195 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
196 tree rtl_expr;
197 /* Non-zero if this temporary is currently in use. */
198 char in_use;
199 /* Non-zero if this temporary has its address taken. */
200 char addr_taken;
201 /* Nesting level at which this slot is being used. */
202 int level;
203 /* Non-zero if this should survive a call to free_temp_slots. */
204 int keep;
205 /* The offset of the slot from the frame_pointer, including extra space
206 for alignment. This info is for combine_temp_slots. */
207 HOST_WIDE_INT base_offset;
208 /* The size of the slot, including extra space for alignment. This
209 info is for combine_temp_slots. */
210 HOST_WIDE_INT full_size;
213 /* This structure is used to record MEMs or pseudos used to replace VAR, any
214 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
215 maintain this list in case two operands of an insn were required to match;
216 in that case we must ensure we use the same replacement. */
218 struct fixup_replacement
220 rtx old;
221 rtx new;
222 struct fixup_replacement *next;
225 struct insns_for_mem_entry {
226 /* The KEY in HE will be a MEM. */
227 struct hash_entry he;
228 /* These are the INSNS which reference the MEM. */
229 rtx insns;
232 /* Forward declarations. */
234 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
235 int, struct function *));
236 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
237 HOST_WIDE_INT, int, tree));
238 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
239 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
240 enum machine_mode, enum machine_mode,
241 int, int, int, struct hash_table *));
242 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
243 struct hash_table *));
244 static struct fixup_replacement
245 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
246 static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
247 rtx, int, struct hash_table *));
248 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
249 struct fixup_replacement **));
250 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
251 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
252 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
253 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
254 static void instantiate_decls PARAMS ((tree, int));
255 static void instantiate_decls_1 PARAMS ((tree, int));
256 static void instantiate_decl PARAMS ((rtx, int, int));
257 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
258 static void delete_handlers PARAMS ((void));
259 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
260 struct args_size *));
261 #ifndef ARGS_GROW_DOWNWARD
262 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
263 tree));
264 #endif
265 #ifdef ARGS_GROW_DOWNWARD
266 static tree round_down PARAMS ((tree, int));
267 #endif
268 static rtx round_trampoline_addr PARAMS ((rtx));
269 static tree blocks_nreverse PARAMS ((tree));
270 static int all_blocks PARAMS ((tree, tree *));
271 static tree *get_block_vector PARAMS ((tree, int *));
272 /* We always define `record_insns' even if it is not used so that we
273 can always export `prologue_epilogue_contains'. */
274 static int *record_insns PARAMS ((rtx)) ATTRIBUTE_UNUSED;
275 static int contains PARAMS ((rtx, int *));
276 #ifdef HAVE_return
277 static void emit_return_into_block PARAMS ((basic_block));
278 #endif
279 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
280 static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
281 struct hash_table *));
282 static int is_addressof PARAMS ((rtx *, void *));
283 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
284 struct hash_table *,
285 hash_table_key));
286 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
287 static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
288 static int insns_for_mem_walk PARAMS ((rtx *, void *));
289 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
290 static void mark_temp_slot PARAMS ((struct temp_slot *));
291 static void mark_function_status PARAMS ((struct function *));
292 static void mark_function_chain PARAMS ((void *));
293 static void prepare_function_start PARAMS ((void));
294 static void do_clobber_return_reg PARAMS ((rtx, void *));
295 static void do_use_return_reg PARAMS ((rtx, void *));
297 /* Pointer to chain of `struct function' for containing functions. */
298 struct function *outer_function_chain;
300 /* Given a function decl for a containing function,
301 return the `struct function' for it. */
303 struct function *
304 find_function_data (decl)
305 tree decl;
307 struct function *p;
309 for (p = outer_function_chain; p; p = p->next)
310 if (p->decl == decl)
311 return p;
313 abort ();
316 /* Save the current context for compilation of a nested function.
317 This is called from language-specific code. The caller should use
318 the save_lang_status callback to save any language-specific state,
319 since this function knows only about language-independent
320 variables. */
322 void
323 push_function_context_to (context)
324 tree context;
326 struct function *p, *context_data;
328 if (context)
330 context_data = (context == current_function_decl
331 ? cfun
332 : find_function_data (context));
333 context_data->contains_functions = 1;
336 if (cfun == 0)
337 init_dummy_function_start ();
338 p = cfun;
340 p->next = outer_function_chain;
341 outer_function_chain = p;
342 p->fixup_var_refs_queue = 0;
344 save_tree_status (p);
345 if (save_lang_status)
346 (*save_lang_status) (p);
347 if (save_machine_status)
348 (*save_machine_status) (p);
350 cfun = 0;
353 void
354 push_function_context ()
356 push_function_context_to (current_function_decl);
359 /* Restore the last saved context, at the end of a nested function.
360 This function is called from language-specific code. */
362 void
363 pop_function_context_from (context)
364 tree context ATTRIBUTE_UNUSED;
366 struct function *p = outer_function_chain;
367 struct var_refs_queue *queue;
368 struct var_refs_queue *next;
370 cfun = p;
371 outer_function_chain = p->next;
373 current_function_decl = p->decl;
374 reg_renumber = 0;
376 restore_tree_status (p);
377 restore_emit_status (p);
379 if (restore_machine_status)
380 (*restore_machine_status) (p);
381 if (restore_lang_status)
382 (*restore_lang_status) (p);
384 /* Finish doing put_var_into_stack for any of our variables
385 which became addressable during the nested function. */
386 for (queue = p->fixup_var_refs_queue; queue; queue = next)
388 next = queue->next;
389 fixup_var_refs (queue->modified, queue->promoted_mode,
390 queue->unsignedp, 0);
391 free (queue);
393 p->fixup_var_refs_queue = 0;
395 /* Reset variables that have known state during rtx generation. */
396 rtx_equal_function_value_matters = 1;
397 virtuals_instantiated = 0;
400 void
401 pop_function_context ()
403 pop_function_context_from (current_function_decl);
406 /* Clear out all parts of the state in F that can safely be discarded
407 after the function has been parsed, but not compiled, to let
408 garbage collection reclaim the memory. */
410 void
411 free_after_parsing (f)
412 struct function *f;
414 /* f->expr->forced_labels is used by code generation. */
415 /* f->emit->regno_reg_rtx is used by code generation. */
416 /* f->varasm is used by code generation. */
417 /* f->eh->eh_return_stub_label is used by code generation. */
419 if (free_lang_status)
420 (*free_lang_status) (f);
421 free_stmt_status (f);
424 /* Clear out all parts of the state in F that can safely be discarded
425 after the function has been compiled, to let garbage collection
426 reclaim the memory. */
428 void
429 free_after_compilation (f)
430 struct function *f;
432 free_eh_status (f);
433 free_expr_status (f);
434 free_emit_status (f);
435 free_varasm_status (f);
437 if (free_machine_status)
438 (*free_machine_status) (f);
440 if (f->x_parm_reg_stack_loc)
441 free (f->x_parm_reg_stack_loc);
443 f->arg_offset_rtx = NULL;
444 f->return_rtx = NULL;
445 f->internal_arg_pointer = NULL;
446 f->x_nonlocal_labels = NULL;
447 f->x_nonlocal_goto_handler_slots = NULL;
448 f->x_nonlocal_goto_handler_labels = NULL;
449 f->x_nonlocal_goto_stack_level = NULL;
450 f->x_cleanup_label = NULL;
451 f->x_return_label = NULL;
452 f->x_save_expr_regs = NULL;
453 f->x_stack_slot_list = NULL;
454 f->x_rtl_expr_chain = NULL;
455 f->x_tail_recursion_label = NULL;
456 f->x_tail_recursion_reentry = NULL;
457 f->x_arg_pointer_save_area = NULL;
458 f->x_context_display = NULL;
459 f->x_trampoline_list = NULL;
460 f->x_parm_birth_insn = NULL;
461 f->x_last_parm_insn = NULL;
462 f->x_parm_reg_stack_loc = NULL;
463 f->x_temp_slots = NULL;
464 f->fixup_var_refs_queue = NULL;
465 f->original_arg_vector = NULL;
466 f->original_decl_initial = NULL;
467 f->inl_last_parm_insn = NULL;
468 f->epilogue_delay_list = NULL;
472 /* Allocate fixed slots in the stack frame of the current function. */
474 /* Return size needed for stack frame based on slots so far allocated in
475 function F.
476 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
477 the caller may have to do that. */
479 HOST_WIDE_INT
480 get_func_frame_size (f)
481 struct function *f;
483 #ifdef FRAME_GROWS_DOWNWARD
484 return -f->x_frame_offset;
485 #else
486 return f->x_frame_offset;
487 #endif
490 /* Return size needed for stack frame based on slots so far allocated.
491 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
492 the caller may have to do that. */
493 HOST_WIDE_INT
494 get_frame_size ()
496 return get_func_frame_size (cfun);
499 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
500 with machine mode MODE.
502 ALIGN controls the amount of alignment for the address of the slot:
503 0 means according to MODE,
504 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
505 positive specifies alignment boundary in bits.
507 We do not round to stack_boundary here.
509 FUNCTION specifies the function to allocate in. */
511 static rtx
512 assign_stack_local_1 (mode, size, align, function)
513 enum machine_mode mode;
514 HOST_WIDE_INT size;
515 int align;
516 struct function *function;
518 register rtx x, addr;
519 int bigend_correction = 0;
520 int alignment;
522 /* Allocate in the memory associated with the function in whose frame
523 we are assigning. */
524 if (function != cfun)
525 push_obstacks (function->function_obstack,
526 function->function_maybepermanent_obstack);
528 if (align == 0)
530 tree type;
532 alignment = GET_MODE_ALIGNMENT (mode);
533 if (mode == BLKmode)
534 alignment = BIGGEST_ALIGNMENT;
536 /* Allow the target to (possibly) increase the alignment of this
537 stack slot. */
538 type = type_for_mode (mode, 0);
539 if (type)
540 alignment = LOCAL_ALIGNMENT (type, alignment);
542 alignment /= BITS_PER_UNIT;
544 else if (align == -1)
546 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
547 size = CEIL_ROUND (size, alignment);
549 else
550 alignment = align / BITS_PER_UNIT;
552 #ifdef FRAME_GROWS_DOWNWARD
553 function->x_frame_offset -= size;
554 #endif
556 /* If the requested alignment exceeds what the stack boundary can provide, cap it at PREFERRED_STACK_BOUNDARY. */
557 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
558 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
560 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
561 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
563 /* Round frame offset to that alignment.
564 We must be careful here, since FRAME_OFFSET might be negative and
565 division with a negative dividend isn't as well defined as we might
566 like. So we instead assume that ALIGNMENT is a power of two and
567 use logical operations which are unambiguous. */
568 #ifdef FRAME_GROWS_DOWNWARD
569 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
570 #else
571 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
572 #endif
574 /* On a big-endian machine, if we are allocating more space than we will use,
575 use the least significant bytes of those that are allocated. */
576 if (BYTES_BIG_ENDIAN && mode != BLKmode)
577 bigend_correction = size - GET_MODE_SIZE (mode);
579 /* If we have already instantiated virtual registers, return the actual
580 address relative to the frame pointer. */
581 if (function == cfun && virtuals_instantiated)
582 addr = plus_constant (frame_pointer_rtx,
583 (frame_offset + bigend_correction
584 + STARTING_FRAME_OFFSET));
585 else
586 addr = plus_constant (virtual_stack_vars_rtx,
587 function->x_frame_offset + bigend_correction);
589 #ifndef FRAME_GROWS_DOWNWARD
590 function->x_frame_offset += size;
591 #endif
593 x = gen_rtx_MEM (mode, addr);
595 function->x_stack_slot_list
596 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
598 if (function != cfun)
599 pop_obstacks ();
601 return x;
604 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
605 current function. */
607 assign_stack_local (mode, size, align)
608 enum machine_mode mode;
609 HOST_WIDE_INT size;
610 int align;
612 return assign_stack_local_1 (mode, size, align, cfun);
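/* Illustrative use only (an assumption, not taken from this file's
   callers): ask for a slot big enough for an SImode value in the current
   function, with ALIGN == 0 so the alignment comes from the mode.  */
#if 0
   rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
#endif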
615 /* Allocate a temporary stack slot and record it for possible later
616 reuse.
618 MODE is the machine mode to be given to the returned rtx.
620 SIZE is the size in units of the space required. We do no rounding here
621 since assign_stack_local will do any required rounding.
623 KEEP is 1 if this slot is to be retained after a call to
624 free_temp_slots. Automatic variables for a block are allocated
625 with this flag. KEEP is 2 if we allocate a longer term temporary,
626 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
627 if we are to allocate something at an inner level to be treated as
628 a variable in the block (e.g., a SAVE_EXPR).
630 TYPE is the type that will be used for the stack slot. */
632 static rtx
633 assign_stack_temp_for_type (mode, size, keep, type)
634 enum machine_mode mode;
635 HOST_WIDE_INT size;
636 int keep;
637 tree type;
639 int align;
640 int alias_set;
641 struct temp_slot *p, *best_p = 0;
643 /* If SIZE is -1 it means that somebody tried to allocate a temporary
644 of a variable size. */
645 if (size == -1)
646 abort ();
648 /* If we know the alias set for the memory that will be used, use
649 it. If there's no TYPE, then we don't know anything about the
650 alias set for the memory. */
651 if (type)
652 alias_set = get_alias_set (type);
653 else
654 alias_set = 0;
656 align = GET_MODE_ALIGNMENT (mode);
657 if (mode == BLKmode)
658 align = BIGGEST_ALIGNMENT;
660 if (! type)
661 type = type_for_mode (mode, 0);
662 if (type)
663 align = LOCAL_ALIGNMENT (type, align);
665 /* Try to find an available, already-allocated temporary of the proper
666 mode which meets the size and alignment requirements. Choose the
667 smallest one with the closest alignment. */
668 for (p = temp_slots; p; p = p->next)
669 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
670 && ! p->in_use
671 && (!flag_strict_aliasing
672 || (alias_set && p->alias_set == alias_set))
673 && (best_p == 0 || best_p->size > p->size
674 || (best_p->size == p->size && best_p->align > p->align)))
676 if (p->align == align && p->size == size)
678 best_p = 0;
679 break;
681 best_p = p;
684 /* Make our best, if any, the one to use. */
685 if (best_p)
687 /* If there are enough aligned bytes left over, make them into a new
688 temp_slot so that the extra bytes don't get wasted. Do this only
689 for BLKmode slots, so that we can be sure of the alignment. */
690 if (GET_MODE (best_p->slot) == BLKmode
691 /* We can't split slots if -fstrict-aliasing because the
692 information about the alias set for the new slot will be
693 lost. */
694 && !flag_strict_aliasing)
696 int alignment = best_p->align / BITS_PER_UNIT;
697 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
699 if (best_p->size - rounded_size >= alignment)
701 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
702 p->in_use = p->addr_taken = 0;
703 p->size = best_p->size - rounded_size;
704 p->base_offset = best_p->base_offset + rounded_size;
705 p->full_size = best_p->full_size - rounded_size;
706 p->slot = gen_rtx_MEM (BLKmode,
707 plus_constant (XEXP (best_p->slot, 0),
708 rounded_size));
709 p->align = best_p->align;
710 p->address = 0;
711 p->rtl_expr = 0;
712 p->next = temp_slots;
713 temp_slots = p;
715 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
716 stack_slot_list);
718 best_p->size = rounded_size;
719 best_p->full_size = rounded_size;
723 p = best_p;
726 /* If we still didn't find one, make a new temporary. */
727 if (p == 0)
729 HOST_WIDE_INT frame_offset_old = frame_offset;
731 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
733 /* We are passing an explicit alignment request to assign_stack_local.
734 One side effect of that is assign_stack_local will not round SIZE
735 to ensure the frame offset remains suitably aligned.
737 So for requests which depended on the rounding of SIZE, we go ahead
738 and round it now. We also make sure ALIGNMENT is at least
739 BIGGEST_ALIGNMENT. */
740 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
741 abort();
742 p->slot = assign_stack_local (mode,
743 (mode == BLKmode
744 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
745 : size),
746 align);
748 p->align = align;
749 p->alias_set = alias_set;
751 /* The following slot size computation is necessary because we don't
752 know the actual size of the temporary slot until assign_stack_local
753 has performed all the frame alignment and size rounding for the
754 requested temporary. Note that extra space added for alignment
755 can be either above or below this stack slot depending on which
756 way the frame grows. We include the extra space if and only if it
757 is above this slot. */
758 #ifdef FRAME_GROWS_DOWNWARD
759 p->size = frame_offset_old - frame_offset;
760 #else
761 p->size = size;
762 #endif
764 /* Now define the fields used by combine_temp_slots. */
765 #ifdef FRAME_GROWS_DOWNWARD
766 p->base_offset = frame_offset;
767 p->full_size = frame_offset_old - frame_offset;
768 #else
769 p->base_offset = frame_offset_old;
770 p->full_size = frame_offset - frame_offset_old;
771 #endif
772 p->address = 0;
773 p->next = temp_slots;
774 temp_slots = p;
777 p->in_use = 1;
778 p->addr_taken = 0;
779 p->rtl_expr = seq_rtl_expr;
781 if (keep == 2)
783 p->level = target_temp_slot_level;
784 p->keep = 0;
786 else if (keep == 3)
788 p->level = var_temp_slot_level;
789 p->keep = 0;
791 else
793 p->level = temp_slot_level;
794 p->keep = keep;
797 /* We may be reusing an old slot, so clear any MEM flags that may have been
798 set from before. */
799 RTX_UNCHANGING_P (p->slot) = 0;
800 MEM_IN_STRUCT_P (p->slot) = 0;
801 MEM_SCALAR_P (p->slot) = 0;
802 MEM_ALIAS_SET (p->slot) = 0;
803 return p->slot;
806 /* Allocate a temporary stack slot and record it for possible later
807 reuse. First three arguments are same as in preceding function. */
810 assign_stack_temp (mode, size, keep)
811 enum machine_mode mode;
812 HOST_WIDE_INT size;
813 int keep;
815 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
818 /* Assign a temporary of given TYPE.
819 KEEP is as for assign_stack_temp.
820 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
821 it is 0 if a register is OK.
822 DONT_PROMOTE is 1 if we should not promote values in register
823 to wider modes. */
826 assign_temp (type, keep, memory_required, dont_promote)
827 tree type;
828 int keep;
829 int memory_required;
830 int dont_promote ATTRIBUTE_UNUSED;
832 enum machine_mode mode = TYPE_MODE (type);
833 #ifndef PROMOTE_FOR_CALL_ONLY
834 int unsignedp = TREE_UNSIGNED (type);
835 #endif
837 if (mode == BLKmode || memory_required)
839 HOST_WIDE_INT size = int_size_in_bytes (type);
840 rtx tmp;
842 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
843 problems with allocating the stack space. */
844 if (size == 0)
845 size = 1;
847 /* Unfortunately, we don't yet know how to allocate variable-sized
848 temporaries. However, sometimes we have a fixed upper limit on
849 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
850 instead. This is the case for Chill variable-sized strings. */
851 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
852 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
853 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
854 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
856 tmp = assign_stack_temp_for_type (mode, size, keep, type);
857 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
858 return tmp;
861 #ifndef PROMOTE_FOR_CALL_ONLY
862 if (! dont_promote)
863 mode = promote_mode (type, mode, &unsignedp, 0);
864 #endif
866 return gen_reg_rtx (mode);
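/* Illustrative use only (EXP is a hypothetical tree expression, not
   something defined here): request an addressable stack temporary for
   EXP's type that survives free_temp_slots (KEEP == 1), without
   suppressing promotion.  */
#if 0
   rtx t = assign_temp (TREE_TYPE (exp), 1, 1, 0);
#endif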
869 /* Combine temporary stack slots which are adjacent on the stack.
871 This allows for better use of already allocated stack space. This is only
872 done for BLKmode slots because we can be sure that we won't have alignment
873 problems in this case. */
875 void
876 combine_temp_slots ()
878 struct temp_slot *p, *q;
879 struct temp_slot *prev_p, *prev_q;
880 int num_slots;
882 /* We can't combine slots, because the information about which slot
883 is in which alias set will be lost. */
884 if (flag_strict_aliasing)
885 return;
887 /* If there are a lot of temp slots, don't do anything unless
888 high levels of optimization are in use. */
889 if (! flag_expensive_optimizations)
890 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
891 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
892 return;
894 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
896 int delete_p = 0;
898 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
899 for (q = p->next, prev_q = p; q; q = prev_q->next)
901 int delete_q = 0;
902 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
904 if (p->base_offset + p->full_size == q->base_offset)
906 /* Q comes after P; combine Q into P. */
907 p->size += q->size;
908 p->full_size += q->full_size;
909 delete_q = 1;
911 else if (q->base_offset + q->full_size == p->base_offset)
913 /* P comes after Q; combine P into Q. */
914 q->size += p->size;
915 q->full_size += p->full_size;
916 delete_p = 1;
917 break;
920 /* Either delete Q or advance past it. */
921 if (delete_q)
922 prev_q->next = q->next;
923 else
924 prev_q = q;
926 /* Either delete P or advance past it. */
927 if (delete_p)
929 if (prev_p)
930 prev_p->next = p->next;
931 else
932 temp_slots = p->next;
934 else
935 prev_p = p;
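/* Example (illustrative only): if free BLKmode slot P covers offsets
   [16, 32) -- base_offset 16, full_size 16 -- and free BLKmode slot Q
   covers [32, 48), then p->base_offset + p->full_size == q->base_offset,
   so Q is merged into P, leaving one reusable slot covering [16, 48).  */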
939 /* Find the temp slot corresponding to the object at address X. */
941 static struct temp_slot *
942 find_temp_slot_from_address (x)
943 rtx x;
945 struct temp_slot *p;
946 rtx next;
948 for (p = temp_slots; p; p = p->next)
950 if (! p->in_use)
951 continue;
953 else if (XEXP (p->slot, 0) == x
954 || p->address == x
955 || (GET_CODE (x) == PLUS
956 && XEXP (x, 0) == virtual_stack_vars_rtx
957 && GET_CODE (XEXP (x, 1)) == CONST_INT
958 && INTVAL (XEXP (x, 1)) >= p->base_offset
959 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
960 return p;
962 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
963 for (next = p->address; next; next = XEXP (next, 1))
964 if (XEXP (next, 0) == x)
965 return p;
968 /* If we have a sum involving a register, see if it points to a temp
969 slot. */
970 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
971 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
972 return p;
973 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
974 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
975 return p;
977 return 0;
980 /* Indicate that NEW is an alternate way of referring to the temp slot
981 that previously was known by OLD. */
983 void
984 update_temp_slot_address (old, new)
985 rtx old, new;
987 struct temp_slot *p;
989 if (rtx_equal_p (old, new))
990 return;
992 p = find_temp_slot_from_address (old);
994 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
995 is a register, see if one operand of the PLUS is a temporary
996 location; if so, NEW points into it. Otherwise, if both OLD and
997 NEW are PLUS expressions and there is a register in common between
998 them, try a recursive call on those values. */
999 if (p == 0)
1001 if (GET_CODE (old) != PLUS)
1002 return;
1004 if (GET_CODE (new) == REG)
1006 update_temp_slot_address (XEXP (old, 0), new);
1007 update_temp_slot_address (XEXP (old, 1), new);
1008 return;
1010 else if (GET_CODE (new) != PLUS)
1011 return;
1013 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1014 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1015 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1016 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1017 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1018 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1019 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1020 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1022 return;
1025 /* Otherwise add an alias for the temp's address. */
1026 else if (p->address == 0)
1027 p->address = new;
1028 else
1030 if (GET_CODE (p->address) != EXPR_LIST)
1031 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1033 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
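/* Example (illustrative only): suppose no slot is recorded for OLD
   itself, OLD is (plus (reg 100) (const_int 8)) and NEW is
   (plus (reg 101) (const_int 8)).  The two CONST_INT operands match,
   so we recurse on (reg 100) and (reg 101); if (reg 100) is a known
   address of a temporary, (reg 101) is recorded as an alias for it.  */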
1037 /* If X could be a reference to a temporary slot, mark the fact that its
1038 address was taken. */
1040 void
1041 mark_temp_addr_taken (x)
1042 rtx x;
1044 struct temp_slot *p;
1046 if (x == 0)
1047 return;
1049 /* If X is not in memory or is at a constant address, it cannot be in
1050 a temporary slot. */
1051 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1052 return;
1054 p = find_temp_slot_from_address (XEXP (x, 0));
1055 if (p != 0)
1056 p->addr_taken = 1;
1059 /* If X could be a reference to a temporary slot, mark that slot as
1060 belonging to the level one higher than the current level. If X
1061 matched one of our slots, just mark that one. Otherwise, we can't
1062 easily predict which it is, so upgrade all of them. Kept slots
1063 need not be touched.
1065 This is called when an ({...}) construct occurs and a statement
1066 returns a value in memory. */
1068 void
1069 preserve_temp_slots (x)
1070 rtx x;
1072 struct temp_slot *p = 0;
1074 /* If there is no result, we still might have some objects whose addresses
1075 were taken, so we need to make sure they stay around. */
1076 if (x == 0)
1078 for (p = temp_slots; p; p = p->next)
1079 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1080 p->level--;
1082 return;
1085 /* If X is a register that is being used as a pointer, see if we have
1086 a temporary slot we know it points to. To be consistent with
1087 the code below, we really should preserve all non-kept slots
1088 if we can't find a match, but that seems to be much too costly. */
1089 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1090 p = find_temp_slot_from_address (x);
1092 /* If X is not in memory or is at a constant address, it cannot be in
1093 a temporary slot, but it can contain something whose address was
1094 taken. */
1095 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1097 for (p = temp_slots; p; p = p->next)
1098 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1099 p->level--;
1101 return;
1104 /* First see if we can find a match. */
1105 if (p == 0)
1106 p = find_temp_slot_from_address (XEXP (x, 0));
1108 if (p != 0)
1110 /* Move everything at our level whose address was taken to our new
1111 level in case we used its address. */
1112 struct temp_slot *q;
1114 if (p->level == temp_slot_level)
1116 for (q = temp_slots; q; q = q->next)
1117 if (q != p && q->addr_taken && q->level == p->level)
1118 q->level--;
1120 p->level--;
1121 p->addr_taken = 0;
1123 return;
1126 /* Otherwise, preserve all non-kept slots at this level. */
1127 for (p = temp_slots; p; p = p->next)
1128 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1129 p->level--;
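/* Example (illustrative only): for a statement expression such as
   `x = ({ struct S s = f (); s; });' the value of the construct may be
   returned in a stack temporary.  preserve_temp_slots is called with
   that value so the slot is moved up one level and is not freed along
   with the other temporaries made inside the braces.  */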
1132 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1133 with that RTL_EXPR, promote it into a temporary slot at the present
1134 level so it will not be freed when we free slots made in the
1135 RTL_EXPR. */
1137 void
1138 preserve_rtl_expr_result (x)
1139 rtx x;
1141 struct temp_slot *p;
1143 /* If X is not in memory or is at a constant address, it cannot be in
1144 a temporary slot. */
1145 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1146 return;
1148 /* If we can find a match, move it to our level unless it is already at
1149 an upper level. */
1150 p = find_temp_slot_from_address (XEXP (x, 0));
1151 if (p != 0)
1153 p->level = MIN (p->level, temp_slot_level);
1154 p->rtl_expr = 0;
1157 return;
1160 /* Free all temporaries used so far. This is normally called at the end
1161 of generating code for a statement. Don't free any temporaries
1162 currently in use for an RTL_EXPR that hasn't yet been emitted.
1163 We could eventually do better than this since such a temporary can be reused
1164 generating the same RTL_EXPR, but this is complex and probably not
1165 worthwhile. */
1167 void
1168 free_temp_slots ()
1170 struct temp_slot *p;
1172 for (p = temp_slots; p; p = p->next)
1173 if (p->in_use && p->level == temp_slot_level && ! p->keep
1174 && p->rtl_expr == 0)
1175 p->in_use = 0;
1177 combine_temp_slots ();
1180 /* Free all temporary slots used in T, an RTL_EXPR node. */
1182 void
1183 free_temps_for_rtl_expr (t)
1184 tree t;
1186 struct temp_slot *p;
1188 for (p = temp_slots; p; p = p->next)
1189 if (p->rtl_expr == t)
1191 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1192 needs to be preserved. This can happen if a temporary in
1193 the RTL_EXPR was addressed; preserve_temp_slots will move
1194 the temporary into a higher level. */
1195 if (temp_slot_level <= p->level)
1196 p->in_use = 0;
1197 else
1198 p->rtl_expr = NULL_TREE;
1201 combine_temp_slots ();
1204 /* Mark all temporaries ever allocated in this function as not suitable
1205 for reuse until the current level is exited. */
1207 void
1208 mark_all_temps_used ()
1210 struct temp_slot *p;
1212 for (p = temp_slots; p; p = p->next)
1214 p->in_use = p->keep = 1;
1215 p->level = MIN (p->level, temp_slot_level);
1219 /* Push deeper into the nesting level for stack temporaries. */
1221 void
1222 push_temp_slots ()
1224 temp_slot_level++;
1227 /* Likewise, but save the new level as the place to allocate variables
1228 for blocks. */
1230 #if 0
1231 void
1232 push_temp_slots_for_block ()
1234 push_temp_slots ();
1236 var_temp_slot_level = temp_slot_level;
1239 /* Likewise, but save the new level as the place to allocate temporaries
1240 for TARGET_EXPRs. */
1242 void
1243 push_temp_slots_for_target ()
1245 push_temp_slots ();
1247 target_temp_slot_level = temp_slot_level;
1250 /* Set and get the value of target_temp_slot_level. The only
1251 permitted use of these functions is to save and restore this value. */
1254 get_target_temp_slot_level ()
1256 return target_temp_slot_level;
1259 void
1260 set_target_temp_slot_level (level)
1261 int level;
1263 target_temp_slot_level = level;
1265 #endif
1267 /* Pop a temporary nesting level. All slots in use in the current level
1268 are freed. */
1270 void
1271 pop_temp_slots ()
1273 struct temp_slot *p;
1275 for (p = temp_slots; p; p = p->next)
1276 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1277 p->in_use = 0;
1279 combine_temp_slots ();
1281 temp_slot_level--;
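/* Illustrative pattern only (an assumption about typical callers, not
   taken from this file): bracketing expansion of a statement with a
   temporary-slot level.  */
#if 0
   push_temp_slots ();
   /* ... expand the statement, allocating temporaries with
      assign_stack_temp () or assign_temp () as needed ... */
   free_temp_slots ();
   pop_temp_slots ();
#endif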
1284 /* Initialize temporary slots. */
1286 void
1287 init_temp_slots ()
1289 /* We have not allocated any temporaries yet. */
1290 temp_slots = 0;
1291 temp_slot_level = 0;
1292 var_temp_slot_level = 0;
1293 target_temp_slot_level = 0;
1296 /* Retroactively move an auto variable from a register to a stack slot.
1297 This is done when an address-reference to the variable is seen. */
1299 void
1300 put_var_into_stack (decl)
1301 tree decl;
1303 register rtx reg;
1304 enum machine_mode promoted_mode, decl_mode;
1305 struct function *function = 0;
1306 tree context;
1307 int can_use_addressof;
1309 context = decl_function_context (decl);
1311 /* Get the current rtl used for this object and its original mode. */
1312 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1314 /* No need to do anything if decl has no rtx yet
1315 since in that case caller is setting TREE_ADDRESSABLE
1316 and a stack slot will be assigned when the rtl is made. */
1317 if (reg == 0)
1318 return;
1320 /* Get the declared mode for this object. */
1321 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1322 : DECL_MODE (decl));
1323 /* Get the mode it's actually stored in. */
1324 promoted_mode = GET_MODE (reg);
1326 /* If this variable comes from an outer function,
1327 find that function's saved context. */
1328 if (context != current_function_decl && context != inline_function_decl)
1329 for (function = outer_function_chain; function; function = function->next)
1330 if (function->decl == context)
1331 break;
1333 /* If this is a variable-size object with a pseudo to address it,
1334 put that pseudo into the stack, if the var is nonlocal. */
1335 if (DECL_NONLOCAL (decl)
1336 && GET_CODE (reg) == MEM
1337 && GET_CODE (XEXP (reg, 0)) == REG
1338 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1340 reg = XEXP (reg, 0);
1341 decl_mode = promoted_mode = GET_MODE (reg);
1344 can_use_addressof
1345 = (function == 0
1346 && optimize > 0
1347 /* FIXME make it work for promoted modes too */
1348 && decl_mode == promoted_mode
1349 #ifdef NON_SAVING_SETJMP
1350 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1351 #endif
1354 /* If we can't use ADDRESSOF, make sure we see through one we already
1355 generated. */
1356 if (! can_use_addressof && GET_CODE (reg) == MEM
1357 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1358 reg = XEXP (XEXP (reg, 0), 0);
1360 /* Now we should have a value that resides in one or more pseudo regs. */
1362 if (GET_CODE (reg) == REG)
1364 /* If this variable lives in the current function and we don't need
1365 to put things in the stack for the sake of setjmp, try to keep it
1366 in a register until we know we actually need the address. */
1367 if (can_use_addressof)
1368 gen_mem_addressof (reg, decl);
1369 else
1370 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1371 promoted_mode, decl_mode,
1372 TREE_SIDE_EFFECTS (decl), 0,
1373 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1376 else if (GET_CODE (reg) == CONCAT)
1378 /* A CONCAT contains two pseudos; put them both in the stack.
1379 We do it so they end up consecutive. */
1380 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1381 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1382 #ifdef FRAME_GROWS_DOWNWARD
1383 /* Since part 0 should have a lower address, do it second. */
1384 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1385 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1386 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1388 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1389 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1390 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1392 #else
1393 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1394 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1395 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1397 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1398 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1399 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1401 #endif
1403 /* Change the CONCAT into a combined MEM for both parts. */
1404 PUT_CODE (reg, MEM);
1405 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1406 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1408 /* The two parts are in memory order already.
1409 Use the lower part's address as ours. */
1410 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1411 /* Prevent sharing of rtl that might lose. */
1412 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1413 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1415 else
1416 return;
1418 if (current_function_check_memory_usage)
1419 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1420 XEXP (reg, 0), Pmode,
1421 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1422 TYPE_MODE (sizetype),
1423 GEN_INT (MEMORY_USE_RW),
1424 TYPE_MODE (integer_type_node));
1427 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1428 into the stack frame of FUNCTION (0 means the current function).
1429 DECL_MODE is the machine mode of the user-level data type.
1430 PROMOTED_MODE is the machine mode of the register.
1431 VOLATILE_P is nonzero if this is for a "volatile" decl.
1432 USED_P is nonzero if this reg might have already been used in an insn. */
1434 static void
1435 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1436 original_regno, used_p, ht)
1437 struct function *function;
1438 rtx reg;
1439 tree type;
1440 enum machine_mode promoted_mode, decl_mode;
1441 int volatile_p;
1442 int original_regno;
1443 int used_p;
1444 struct hash_table *ht;
1446 struct function *func = function ? function : cfun;
1447 rtx new = 0;
1448 int regno = original_regno;
1450 if (regno == 0)
1451 regno = REGNO (reg);
1453 if (regno < func->x_max_parm_reg)
1454 new = func->x_parm_reg_stack_loc[regno];
1455 if (new == 0)
1456 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1458 PUT_CODE (reg, MEM);
1459 PUT_MODE (reg, decl_mode);
1460 XEXP (reg, 0) = XEXP (new, 0);
1461 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1462 MEM_VOLATILE_P (reg) = volatile_p;
1464 /* If this is a memory ref that contains aggregate components,
1465 mark it as such for cse and loop optimize. If we are reusing a
1466 previously generated stack slot, then we need to copy the bit in
1467 case it was set for other reasons. For instance, it is set for
1468 __builtin_va_alist. */
1469 MEM_SET_IN_STRUCT_P (reg,
1470 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1471 MEM_ALIAS_SET (reg) = get_alias_set (type);
1473 /* Now make sure that all refs to the variable, previously made
1474 when it was a register, are fixed up to be valid again. */
1476 if (used_p && function != 0)
1478 struct var_refs_queue *temp;
1480 temp
1481 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1482 temp->modified = reg;
1483 temp->promoted_mode = promoted_mode;
1484 temp->unsignedp = TREE_UNSIGNED (type);
1485 temp->next = function->fixup_var_refs_queue;
1486 function->fixup_var_refs_queue = temp;
1488 else if (used_p)
1489 /* Variable is local; fix it up now. */
1490 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
1493 static void
1494 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1495 rtx var;
1496 enum machine_mode promoted_mode;
1497 int unsignedp;
1498 struct hash_table *ht;
1500 tree pending;
1501 rtx first_insn = get_insns ();
1502 struct sequence_stack *stack = seq_stack;
1503 tree rtl_exps = rtl_expr_chain;
1505 /* Must scan all insns for stack-refs that exceed the limit. */
1506 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1507 stack == 0, ht);
1508 /* If there's a hash table, it must record all uses of VAR. */
1509 if (ht)
1510 return;
1512 /* Scan all pending sequences too. */
1513 for (; stack; stack = stack->next)
1515 push_to_sequence (stack->first);
1516 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1517 stack->first, stack->next != 0, 0);
1518 /* Update remembered end of sequence
1519 in case we added an insn at the end. */
1520 stack->last = get_last_insn ();
1521 end_sequence ();
1524 /* Scan all waiting RTL_EXPRs too. */
1525 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1527 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1528 if (seq != const0_rtx && seq != 0)
1530 push_to_sequence (seq);
1531 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1533 end_sequence ();
1537 /* Scan the catch clauses for exception handling too. */
1538 push_to_sequence (catch_clauses);
1539 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1540 0, 0);
1541 end_sequence ();
1544 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries and X is
1545 some part of an insn. Return a struct fixup_replacement whose OLD
1546 value is equal to X. Allocate a new structure if no such entry exists. */
1548 static struct fixup_replacement *
1549 find_fixup_replacement (replacements, x)
1550 struct fixup_replacement **replacements;
1551 rtx x;
1553 struct fixup_replacement *p;
1555 /* See if we have already replaced this. */
1556 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1559 if (p == 0)
1561 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1562 p->old = x;
1563 p->new = 0;
1564 p->next = *replacements;
1565 *replacements = p;
1568 return p;
1571 /* Scan the insn-chain starting with INSN for refs to VAR
1572 and fix them up. TOPLEVEL is nonzero if this chain is the
1573 main chain of insns for the current function. */
1575 static void
1576 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1577 rtx var;
1578 enum machine_mode promoted_mode;
1579 int unsignedp;
1580 rtx insn;
1581 int toplevel;
1582 struct hash_table *ht;
1584 rtx call_dest = 0;
1585 rtx insn_list = NULL_RTX;
1587 /* If we already know which INSNs reference VAR there's no need
1588 to walk the entire instruction chain. */
1589 if (ht)
1591 insn_list = ((struct insns_for_mem_entry *)
1592 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1593 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1594 insn_list = XEXP (insn_list, 1);
1597 while (insn)
1599 rtx next = NEXT_INSN (insn);
1600 rtx set, prev, prev_set;
1601 rtx note;
1603 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1605 /* Remember the notes in case we delete the insn. */
1606 note = REG_NOTES (insn);
1608 /* If this is a CLOBBER of VAR, delete it.
1610 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1611 and REG_RETVAL notes too. */
1612 if (GET_CODE (PATTERN (insn)) == CLOBBER
1613 && (XEXP (PATTERN (insn), 0) == var
1614 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1615 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1616 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1618 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1619 /* The REG_LIBCALL note will go away since we are going to
1620 turn INSN into a NOTE, so just delete the
1621 corresponding REG_RETVAL note. */
1622 remove_note (XEXP (note, 0),
1623 find_reg_note (XEXP (note, 0), REG_RETVAL,
1624 NULL_RTX));
1626 /* In unoptimized compilation, we shouldn't call delete_insn
1627 except in jump.c doing warnings. */
1628 PUT_CODE (insn, NOTE);
1629 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1630 NOTE_SOURCE_FILE (insn) = 0;
1633 /* The insn to load VAR from a home in the arglist
1634 is now a no-op. When we see it, just delete it.
1635 Similarly if this is storing VAR from a register from which
1636 it was loaded in the previous insn. This will occur
1637 when an ADDRESSOF was made for an arglist slot. */
1638 else if (toplevel
1639 && (set = single_set (insn)) != 0
1640 && SET_DEST (set) == var
1641 /* If this represents the result of an insn group,
1642 don't delete the insn. */
1643 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1644 && (rtx_equal_p (SET_SRC (set), var)
1645 || (GET_CODE (SET_SRC (set)) == REG
1646 && (prev = prev_nonnote_insn (insn)) != 0
1647 && (prev_set = single_set (prev)) != 0
1648 && SET_DEST (prev_set) == SET_SRC (set)
1649 && rtx_equal_p (SET_SRC (prev_set), var))))
1651 /* In unoptimized compilation, we shouldn't call delete_insn
1652 except in jump.c doing warnings. */
1653 PUT_CODE (insn, NOTE);
1654 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1655 NOTE_SOURCE_FILE (insn) = 0;
1656 if (insn == last_parm_insn)
1657 last_parm_insn = PREV_INSN (next);
1659 else
1661 struct fixup_replacement *replacements = 0;
1662 rtx next_insn = NEXT_INSN (insn);
1664 if (SMALL_REGISTER_CLASSES)
1666 /* If the insn that copies the results of a CALL_INSN
1667 into a pseudo now references VAR, we have to use an
1668 intermediate pseudo since we want the life of the
1669 return value register to be only a single insn.
1671 If we don't use an intermediate pseudo, such things as
1672 address computations to make the address of VAR valid (when it
1673 is not already) can be placed between the CALL_INSN and INSN.
1675 To make sure this doesn't happen, we record the destination
1676 of the CALL_INSN and see if the next insn uses both that
1677 and VAR. */
1679 if (call_dest != 0 && GET_CODE (insn) == INSN
1680 && reg_mentioned_p (var, PATTERN (insn))
1681 && reg_mentioned_p (call_dest, PATTERN (insn)))
1683 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1685 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1687 PATTERN (insn) = replace_rtx (PATTERN (insn),
1688 call_dest, temp);
1691 if (GET_CODE (insn) == CALL_INSN
1692 && GET_CODE (PATTERN (insn)) == SET)
1693 call_dest = SET_DEST (PATTERN (insn));
1694 else if (GET_CODE (insn) == CALL_INSN
1695 && GET_CODE (PATTERN (insn)) == PARALLEL
1696 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1697 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1698 else
1699 call_dest = 0;
1702 /* See if we have to do anything to INSN now that VAR is in
1703 memory. If it needs to be loaded into a pseudo, use a single
1704 pseudo for the entire insn in case there is a MATCH_DUP
1705 between two operands. We pass a pointer to the head of
1706 a list of struct fixup_replacements. If fixup_var_refs_1
1707 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1708 it will record them in this list.
1710 If it allocated a pseudo for any replacement, we copy into
1711 it here. */
1713 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1714 &replacements);
1716 /* If this is last_parm_insn, and any instructions were output
1717 after it to fix it up, then we must set last_parm_insn to
1718 the last such instruction emitted. */
1719 if (insn == last_parm_insn)
1720 last_parm_insn = PREV_INSN (next_insn);
1722 while (replacements)
1724 if (GET_CODE (replacements->new) == REG)
1726 rtx insert_before;
1727 rtx seq;
1729 /* OLD might be a (subreg (mem)). */
1730 if (GET_CODE (replacements->old) == SUBREG)
1731 replacements->old
1732 = fixup_memory_subreg (replacements->old, insn, 0);
1733 else
1734 replacements->old
1735 = fixup_stack_1 (replacements->old, insn);
1737 insert_before = insn;
1739 /* If we are changing the mode, do a conversion.
1740 This might be wasteful, but combine.c will
1741 eliminate much of the waste. */
1743 if (GET_MODE (replacements->new)
1744 != GET_MODE (replacements->old))
1746 start_sequence ();
1747 convert_move (replacements->new,
1748 replacements->old, unsignedp);
1749 seq = gen_sequence ();
1750 end_sequence ();
1752 else
1753 seq = gen_move_insn (replacements->new,
1754 replacements->old);
1756 emit_insn_before (seq, insert_before);
1759 replacements = replacements->next;
1763 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1764 But don't touch other insns referred to by reg-notes;
1765 we will get them elsewhere. */
1766 while (note)
1768 if (GET_CODE (note) != INSN_LIST)
1769 XEXP (note, 0)
1770 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1771 note = XEXP (note, 1);
1775 if (!ht)
1776 insn = next;
1777 else if (insn_list)
1779 insn = XEXP (insn_list, 0);
1780 insn_list = XEXP (insn_list, 1);
1782 else
1783 insn = NULL_RTX;
1787 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1788 See if the rtx expression at *LOC in INSN needs to be changed.
1790 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1791 contain a list of original rtx's and replacements. If we find that we need
1792 to modify this insn by replacing a memory reference with a pseudo or by
1793 making a new MEM to implement a SUBREG, we consult that list to see if
1794 we have already chosen a replacement. If none has already been allocated,
1795 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1796 or the SUBREG, as appropriate, to the pseudo. */
1798 static void
1799 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1800 register rtx var;
1801 enum machine_mode promoted_mode;
1802 register rtx *loc;
1803 rtx insn;
1804 struct fixup_replacement **replacements;
1806 register int i;
1807 register rtx x = *loc;
1808 RTX_CODE code = GET_CODE (x);
1809 register const char *fmt;
1810 register rtx tem, tem1;
1811 struct fixup_replacement *replacement;
1813 switch (code)
1815 case ADDRESSOF:
1816 if (XEXP (x, 0) == var)
1818 /* Prevent sharing of rtl that might lose. */
1819 rtx sub = copy_rtx (XEXP (var, 0));
1821 if (! validate_change (insn, loc, sub, 0))
1823 rtx y = gen_reg_rtx (GET_MODE (sub));
1824 rtx seq, new_insn;
1826 /* We should be able to replace with a register or all is lost.
1827 Note that we can't use validate_change to verify this, since
1828 we are not trying to replace all duplicates simultaneously. */
1829 if (! validate_replace_rtx (*loc, y, insn))
1830 abort ();
1832 /* Careful! First try to recognize a direct move of the
1833 value, mimicking how things are done in gen_reload wrt
1834 PLUS. Consider what happens when insn is a conditional
1835 move instruction and addsi3 clobbers flags. */
1837 start_sequence ();
1838 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1839 seq = gen_sequence ();
1840 end_sequence ();
1842 if (recog_memoized (new_insn) < 0)
1844 /* That failed. Fall back on force_operand and hope. */
1846 start_sequence ();
1847 force_operand (sub, y);
1848 seq = gen_sequence ();
1849 end_sequence ();
1852 #ifdef HAVE_cc0
1853 /* Don't separate setter from user. */
1854 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1855 insn = PREV_INSN (insn);
1856 #endif
1858 emit_insn_before (seq, insn);
1861 return;
1863 case MEM:
1864 if (var == x)
1866 /* If we already have a replacement, use it. Otherwise,
1867 try to fix up this address in case it is invalid. */
1869 replacement = find_fixup_replacement (replacements, var);
1870 if (replacement->new)
1872 *loc = replacement->new;
1873 return;
1876 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1878 /* Unless we are forcing memory to register or we changed the mode,
1879 we can leave things the way they are if the insn is valid. */
1881 INSN_CODE (insn) = -1;
1882 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1883 && recog_memoized (insn) >= 0)
1884 return;
1886 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1887 return;
1890 /* If X contains VAR, we need to unshare it here so that we update
1891 each occurrence separately. But all identical MEMs in one insn
1892 must be replaced with the same rtx because of the possibility of
1893 MATCH_DUPs. */
1895 if (reg_mentioned_p (var, x))
1897 replacement = find_fixup_replacement (replacements, x);
1898 if (replacement->new == 0)
1899 replacement->new = copy_most_rtx (x, var);
1901 *loc = x = replacement->new;
1903 break;
1905 case REG:
1906 case CC0:
1907 case PC:
1908 case CONST_INT:
1909 case CONST:
1910 case SYMBOL_REF:
1911 case LABEL_REF:
1912 case CONST_DOUBLE:
1913 return;
1915 case SIGN_EXTRACT:
1916 case ZERO_EXTRACT:
1917 /* Note that in some cases those types of expressions are altered
1918 by optimize_bit_field, and do not survive to get here. */
1919 if (XEXP (x, 0) == var
1920 || (GET_CODE (XEXP (x, 0)) == SUBREG
1921 && SUBREG_REG (XEXP (x, 0)) == var))
1923 /* Get TEM as a valid MEM in the mode presently in the insn.
1925 We don't worry about the possibility of MATCH_DUP here; it
1926 is highly unlikely and would be tricky to handle. */
1928 tem = XEXP (x, 0);
1929 if (GET_CODE (tem) == SUBREG)
1931 if (GET_MODE_BITSIZE (GET_MODE (tem))
1932 > GET_MODE_BITSIZE (GET_MODE (var)))
1934 replacement = find_fixup_replacement (replacements, var);
1935 if (replacement->new == 0)
1936 replacement->new = gen_reg_rtx (GET_MODE (var));
1937 SUBREG_REG (tem) = replacement->new;
1939 else
1940 tem = fixup_memory_subreg (tem, insn, 0);
1942 else
1943 tem = fixup_stack_1 (tem, insn);
1945 /* Unless we want to load from memory, get TEM into the proper mode
1946 for an extract from memory. This can only be done if the
1947 extract is at a constant position and length. */
1949 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1950 && GET_CODE (XEXP (x, 2)) == CONST_INT
1951 && ! mode_dependent_address_p (XEXP (tem, 0))
1952 && ! MEM_VOLATILE_P (tem))
1954 enum machine_mode wanted_mode = VOIDmode;
1955 enum machine_mode is_mode = GET_MODE (tem);
1956 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1958 #ifdef HAVE_extzv
1959 if (GET_CODE (x) == ZERO_EXTRACT)
1961 wanted_mode
1962 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1963 if (wanted_mode == VOIDmode)
1964 wanted_mode = word_mode;
1966 #endif
1967 #ifdef HAVE_extv
1968 if (GET_CODE (x) == SIGN_EXTRACT)
1970 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1971 if (wanted_mode == VOIDmode)
1972 wanted_mode = word_mode;
1974 #endif
1975 /* If we have a narrower mode, we can do something. */
1976 if (wanted_mode != VOIDmode
1977 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1979 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1980 rtx old_pos = XEXP (x, 2);
1981 rtx newmem;
1983 /* If the bytes and bits are counted differently, we
1984 must adjust the offset. */
1985 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1986 offset = (GET_MODE_SIZE (is_mode)
1987 - GET_MODE_SIZE (wanted_mode) - offset);
1989 pos %= GET_MODE_BITSIZE (wanted_mode);
1991 newmem = gen_rtx_MEM (wanted_mode,
1992 plus_constant (XEXP (tem, 0), offset));
1993 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1994 MEM_COPY_ATTRIBUTES (newmem, tem);
1996 /* Make the change and see if the insn remains valid. */
1997 INSN_CODE (insn) = -1;
1998 XEXP (x, 0) = newmem;
1999 XEXP (x, 2) = GEN_INT (pos);
2001 if (recog_memoized (insn) >= 0)
2002 return;
2004 /* Otherwise, restore old position. XEXP (x, 0) will be
2005 restored later. */
2006 XEXP (x, 2) = old_pos;
2010 /* If we get here, the bitfield extract insn can't accept a memory
2011 reference. Copy the input into a register. */
2013 tem1 = gen_reg_rtx (GET_MODE (tem));
2014 emit_insn_before (gen_move_insn (tem1, tem), insn);
2015 XEXP (x, 0) = tem1;
2016 return;
2018 break;
2020 case SUBREG:
2021 if (SUBREG_REG (x) == var)
2023 /* If this is a special SUBREG made because VAR was promoted
2024 from a wider mode, replace it with VAR and call ourself
2025 recursively, this time saying that the object previously
2026 had its current mode (by virtue of the SUBREG). */
2028 if (SUBREG_PROMOTED_VAR_P (x))
2030 *loc = var;
2031 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2032 return;
2035 /* If this SUBREG makes VAR wider, it has become a paradoxical
2036 SUBREG with VAR in memory, but these aren't allowed at this
2037 stage of the compilation. So load VAR into a pseudo and take
2038 a SUBREG of that pseudo. */
2039 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2041 replacement = find_fixup_replacement (replacements, var);
2042 if (replacement->new == 0)
2043 replacement->new = gen_reg_rtx (GET_MODE (var));
2044 SUBREG_REG (x) = replacement->new;
2045 return;
2048 /* See if we have already found a replacement for this SUBREG.
2049 If so, use it. Otherwise, make a MEM and see if the insn
2050 is recognized. If not, or if we should force MEM into a register,
2051 make a pseudo for this SUBREG. */
2052 replacement = find_fixup_replacement (replacements, x);
2053 if (replacement->new)
2055 *loc = replacement->new;
2056 return;
2059 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2061 INSN_CODE (insn) = -1;
2062 if (! flag_force_mem && recog_memoized (insn) >= 0)
2063 return;
2065 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2066 return;
2068 break;
2070 case SET:
2071 /* First do special simplification of bit-field references. */
2072 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2073 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2074 optimize_bit_field (x, insn, 0);
2075 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2076 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2077 optimize_bit_field (x, insn, NULL_PTR);
2079 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2080 into a register and then store it back out. */
2081 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2082 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2083 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2084 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2085 > GET_MODE_SIZE (GET_MODE (var))))
2087 replacement = find_fixup_replacement (replacements, var);
2088 if (replacement->new == 0)
2089 replacement->new = gen_reg_rtx (GET_MODE (var));
2091 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2092 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2095 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2096 insn into a pseudo and store the low part of the pseudo into VAR. */
2097 if (GET_CODE (SET_DEST (x)) == SUBREG
2098 && SUBREG_REG (SET_DEST (x)) == var
2099 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2100 > GET_MODE_SIZE (GET_MODE (var))))
2102 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2103 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2104 tem)),
2105 insn);
2106 break;
2110 rtx dest = SET_DEST (x);
2111 rtx src = SET_SRC (x);
2112 #ifdef HAVE_insv
2113 rtx outerdest = dest;
2114 #endif
2116 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2117 || GET_CODE (dest) == SIGN_EXTRACT
2118 || GET_CODE (dest) == ZERO_EXTRACT)
2119 dest = XEXP (dest, 0);
2121 if (GET_CODE (src) == SUBREG)
2122 src = XEXP (src, 0);
2124 /* If VAR does not appear at the top level of the SET
2125 just scan the lower levels of the tree. */
2127 if (src != var && dest != var)
2128 break;
2130 /* We will need to rerecognize this insn. */
2131 INSN_CODE (insn) = -1;
2133 #ifdef HAVE_insv
2134 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2136 /* Since this case will return, ensure we fixup all the
2137 operands here. */
2138 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2139 insn, replacements);
2140 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2141 insn, replacements);
2142 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2143 insn, replacements);
2145 tem = XEXP (outerdest, 0);
2147 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2148 that may appear inside a ZERO_EXTRACT.
2149 This was legitimate when the MEM was a REG. */
2150 if (GET_CODE (tem) == SUBREG
2151 && SUBREG_REG (tem) == var)
2152 tem = fixup_memory_subreg (tem, insn, 0);
2153 else
2154 tem = fixup_stack_1 (tem, insn);
2156 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2157 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2158 && ! mode_dependent_address_p (XEXP (tem, 0))
2159 && ! MEM_VOLATILE_P (tem))
2161 enum machine_mode wanted_mode;
2162 enum machine_mode is_mode = GET_MODE (tem);
2163 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2165 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2166 if (wanted_mode == VOIDmode)
2167 wanted_mode = word_mode;
2169 /* If we have a narrower mode, we can do something. */
2170 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2172 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2173 rtx old_pos = XEXP (outerdest, 2);
2174 rtx newmem;
2176 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2177 offset = (GET_MODE_SIZE (is_mode)
2178 - GET_MODE_SIZE (wanted_mode) - offset);
2180 pos %= GET_MODE_BITSIZE (wanted_mode);
2182 newmem = gen_rtx_MEM (wanted_mode,
2183 plus_constant (XEXP (tem, 0),
2184 offset));
2185 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2186 MEM_COPY_ATTRIBUTES (newmem, tem);
2188 /* Make the change and see if the insn remains valid. */
2189 INSN_CODE (insn) = -1;
2190 XEXP (outerdest, 0) = newmem;
2191 XEXP (outerdest, 2) = GEN_INT (pos);
2193 if (recog_memoized (insn) >= 0)
2194 return;
2196 /* Otherwise, restore old position. XEXP (x, 0) will be
2197 restored later. */
2198 XEXP (outerdest, 2) = old_pos;
2202 /* If we get here, the bit-field store doesn't allow memory
2203 or isn't located at a constant position. Load the value into
2204 a register, do the store, and put it back into memory. */
2206 tem1 = gen_reg_rtx (GET_MODE (tem));
2207 emit_insn_before (gen_move_insn (tem1, tem), insn);
2208 emit_insn_after (gen_move_insn (tem, tem1), insn);
2209 XEXP (outerdest, 0) = tem1;
2210 return;
2212 #endif
2214 /* STRICT_LOW_PART is a no-op on memory references
2215 and it can cause combinations to be unrecognizable,
2216 so eliminate it. */
2218 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2219 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2221 /* A valid insn to copy VAR into or out of a register
2222 must be left alone, to avoid an infinite loop here.
2223 If the reference to VAR is by a subreg, fix that up,
2224 since SUBREG is not valid for a memref.
2225 Also fix up the address of the stack slot.
2227 Note that we must not try to recognize the insn until
2228 after we know that we have valid addresses and no
2229 (subreg (mem ...) ...) constructs, since these interfere
2230 with determining the validity of the insn. */
2232 if ((SET_SRC (x) == var
2233 || (GET_CODE (SET_SRC (x)) == SUBREG
2234 && SUBREG_REG (SET_SRC (x)) == var))
2235 && (GET_CODE (SET_DEST (x)) == REG
2236 || (GET_CODE (SET_DEST (x)) == SUBREG
2237 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2238 && GET_MODE (var) == promoted_mode
2239 && x == single_set (insn))
2241 rtx pat;
2243 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2244 if (replacement->new)
2245 SET_SRC (x) = replacement->new;
2246 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2247 SET_SRC (x) = replacement->new
2248 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2249 else
2250 SET_SRC (x) = replacement->new
2251 = fixup_stack_1 (SET_SRC (x), insn);
2253 if (recog_memoized (insn) >= 0)
2254 return;
2256 /* INSN is not valid, but we know that we want to
2257 copy SET_SRC (x) to SET_DEST (x) in some way. So
2258 we generate the move and see whether it requires more
2259 than one insn. If it does, we emit those insns and
2260 delete INSN. Otherwise, we can just replace the pattern
2261 of INSN; we have already verified above that INSN has
2262 no other function than to do X. */
2264 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2265 if (GET_CODE (pat) == SEQUENCE)
2267 emit_insn_after (pat, insn);
2268 PUT_CODE (insn, NOTE);
2269 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2270 NOTE_SOURCE_FILE (insn) = 0;
2272 else
2273 PATTERN (insn) = pat;
2275 return;
2278 if ((SET_DEST (x) == var
2279 || (GET_CODE (SET_DEST (x)) == SUBREG
2280 && SUBREG_REG (SET_DEST (x)) == var))
2281 && (GET_CODE (SET_SRC (x)) == REG
2282 || (GET_CODE (SET_SRC (x)) == SUBREG
2283 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2284 && GET_MODE (var) == promoted_mode
2285 && x == single_set (insn))
2287 rtx pat;
2289 if (GET_CODE (SET_DEST (x)) == SUBREG)
2290 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2291 else
2292 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2294 if (recog_memoized (insn) >= 0)
2295 return;
2297 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2298 if (GET_CODE (pat) == SEQUENCE)
2300 emit_insn_after (pat, insn);
2301 PUT_CODE (insn, NOTE);
2302 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2303 NOTE_SOURCE_FILE (insn) = 0;
2305 else
2306 PATTERN (insn) = pat;
2308 return;
2311 /* Otherwise, storing into VAR must be handled specially
2312 by storing into a temporary and copying that into VAR
2313 with a new insn after this one. Note that this case
2314 will be used when storing into a promoted scalar since
2315 the insn will now have different modes on the input
2316 and output and hence will be invalid (except for the case
2317 of setting it to a constant, which does not need any
2318 change if it is valid). We generate extra code in that case,
2319 but combine.c will eliminate it. */
2321 if (dest == var)
2323 rtx temp;
2324 rtx fixeddest = SET_DEST (x);
2326 /* STRICT_LOW_PART can be discarded around a MEM. */
2327 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2328 fixeddest = XEXP (fixeddest, 0);
2329 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2330 if (GET_CODE (fixeddest) == SUBREG)
2332 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2333 promoted_mode = GET_MODE (fixeddest);
2335 else
2336 fixeddest = fixup_stack_1 (fixeddest, insn);
2338 temp = gen_reg_rtx (promoted_mode);
2340 emit_insn_after (gen_move_insn (fixeddest,
2341 gen_lowpart (GET_MODE (fixeddest),
2342 temp)),
2343 insn);
2345 SET_DEST (x) = temp;
2349 default:
2350 break;
2353 /* Nothing special about this RTX; fix its operands. */
2355 fmt = GET_RTX_FORMAT (code);
2356 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2358 if (fmt[i] == 'e')
2359 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2360 else if (fmt[i] == 'E')
2362 register int j;
2363 for (j = 0; j < XVECLEN (x, i); j++)
2364 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2365 insn, replacements);
2370 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2371 return an rtx (MEM:m1 newaddr) which is equivalent.
2372 If any insns must be emitted to compute NEWADDR, put them before INSN.
2374 UNCRITICAL nonzero means accept paradoxical subregs.
2375 This is used for subregs found inside REG_NOTES. */
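/* For example (an illustrative sketch assuming 4-byte words and a
   little-endian target; neither is required):

       (subreg:SI (mem:DI (plus (reg fp) (const_int -8))) 1)

   has SUBREG_WORD == 1, so OFFSET is 4 and the result is

       (mem:SI (plus (reg fp) (const_int -4)))

   with any insns needed to form the new address emitted before INSN.
   On a big-endian target an additional correction may be applied, as
   the code below shows.  */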
2377 static rtx
2378 fixup_memory_subreg (x, insn, uncritical)
2379 rtx x;
2380 rtx insn;
2381 int uncritical;
2383 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2384 rtx addr = XEXP (SUBREG_REG (x), 0);
2385 enum machine_mode mode = GET_MODE (x);
2386 rtx result;
2388 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2389 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2390 && ! uncritical)
2391 abort ();
2393 if (BYTES_BIG_ENDIAN)
2394 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2395 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2396 addr = plus_constant (addr, offset);
2397 if (!flag_force_addr && memory_address_p (mode, addr))
2398 /* Shortcut if no insns need be emitted. */
2399 return change_address (SUBREG_REG (x), mode, addr);
2400 start_sequence ();
2401 result = change_address (SUBREG_REG (x), mode, addr);
2402 emit_insn_before (gen_sequence (), insn);
2403 end_sequence ();
2404 return result;
2407 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2408 Replace subexpressions of X in place.
2409 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2410 Otherwise return X, with its contents possibly altered.
2412 If any insns must be emitted to compute NEWADDR, put them before INSN.
2414 UNCRITICAL is as in fixup_memory_subreg. */
2416 static rtx
2417 walk_fixup_memory_subreg (x, insn, uncritical)
2418 register rtx x;
2419 rtx insn;
2420 int uncritical;
2422 register enum rtx_code code;
2423 register const char *fmt;
2424 register int i;
2426 if (x == 0)
2427 return 0;
2429 code = GET_CODE (x);
2431 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2432 return fixup_memory_subreg (x, insn, uncritical);
2434 /* Nothing special about this RTX; fix its operands. */
2436 fmt = GET_RTX_FORMAT (code);
2437 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2439 if (fmt[i] == 'e')
2440 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2441 else if (fmt[i] == 'E')
2443 register int j;
2444 for (j = 0; j < XVECLEN (x, i); j++)
2445 XVECEXP (x, i, j)
2446 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2449 return x;
2452 /* For each memory ref within X, if it refers to a stack slot
2453 with an out of range displacement, put the address in a temp register
2454 (emitting new insns before INSN to load these registers)
2455 and alter the memory ref to use that register.
2456 Replace each such MEM rtx with a copy, to avoid clobberage. */
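/* A sketch of the intended effect (illustrative only): on a machine whose
   load/store displacements are limited, a stack reference such as

       (mem:SI (plus (reg fp) (const_int 40000)))

   may have an invalid address.  In that case the code below does,
   in effect,

       temp = copy_to_reg (ad);   -- ad is the PLUS above, loaded before INSN
       x = change_address (x, VOIDmode, temp);

   so the MEM now uses the register TEMP as its address.  */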
2458 static rtx
2459 fixup_stack_1 (x, insn)
2460 rtx x;
2461 rtx insn;
2463 register int i;
2464 register RTX_CODE code = GET_CODE (x);
2465 register const char *fmt;
2467 if (code == MEM)
2469 register rtx ad = XEXP (x, 0);
2470 /* If we have address of a stack slot but it's not valid
2471 (displacement is too large), compute the sum in a register. */
2472 if (GET_CODE (ad) == PLUS
2473 && GET_CODE (XEXP (ad, 0)) == REG
2474 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2475 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2476 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2477 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2478 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2479 #endif
2480 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2481 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2482 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2483 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2485 rtx temp, seq;
2486 if (memory_address_p (GET_MODE (x), ad))
2487 return x;
2489 start_sequence ();
2490 temp = copy_to_reg (ad);
2491 seq = gen_sequence ();
2492 end_sequence ();
2493 emit_insn_before (seq, insn);
2494 return change_address (x, VOIDmode, temp);
2496 return x;
2499 fmt = GET_RTX_FORMAT (code);
2500 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2502 if (fmt[i] == 'e')
2503 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2504 else if (fmt[i] == 'E')
2506 register int j;
2507 for (j = 0; j < XVECLEN (x, i); j++)
2508 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2511 return x;
2514 /* Optimization: a bit-field instruction whose field
2515 happens to be a byte or halfword in memory
2516 can be changed to a move instruction.
2518 We call here when INSN is an insn to examine or store into a bit-field.
2519 BODY is the SET-rtx to be altered.
2521 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2522 (Currently this is called only from function.c, and EQUIV_MEM
2523 is always 0.) */
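/* An illustrative example (not from the original sources): with 8-bit
   units and BITS_BIG_ENDIAN == BYTES_BIG_ENDIAN, the store

       (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
            (reg:SI r))

   describes a byte-sized, byte-aligned field, so the code below rewrites
   it as an ordinary QImode move,

       (set (mem:QI (plus addr (const_int 1))) (subreg:QI (reg:SI r) 0))

   using change_address for the new MEM and gen_lowpart for the source.  */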
2525 static void
2526 optimize_bit_field (body, insn, equiv_mem)
2527 rtx body;
2528 rtx insn;
2529 rtx *equiv_mem;
2531 register rtx bitfield;
2532 int destflag;
2533 rtx seq = 0;
2534 enum machine_mode mode;
2536 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2537 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2538 bitfield = SET_DEST (body), destflag = 1;
2539 else
2540 bitfield = SET_SRC (body), destflag = 0;
2542 /* First check that the field being stored has constant size and position
2543 and is in fact a byte or halfword suitably aligned. */
2545 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2546 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2547 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2548 != BLKmode)
2549 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2551 register rtx memref = 0;
2553 /* Now check that the containing word is memory, not a register,
2554 and that it is safe to change the machine mode. */
2556 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2557 memref = XEXP (bitfield, 0);
2558 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2559 && equiv_mem != 0)
2560 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2561 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2562 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2563 memref = SUBREG_REG (XEXP (bitfield, 0));
2564 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2565 && equiv_mem != 0
2566 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2567 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2569 if (memref
2570 && ! mode_dependent_address_p (XEXP (memref, 0))
2571 && ! MEM_VOLATILE_P (memref))
2573 /* Now adjust the address, first for any subreg'ing
2574 that we are now getting rid of,
2575 and then for which byte of the word is wanted. */
2577 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2578 rtx insns;
2580 /* Adjust OFFSET to count bits from low-address byte. */
2581 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2582 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2583 - offset - INTVAL (XEXP (bitfield, 1)));
2585 /* Adjust OFFSET to count bytes from low-address byte. */
2586 offset /= BITS_PER_UNIT;
2587 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2589 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2590 if (BYTES_BIG_ENDIAN)
2591 offset -= (MIN (UNITS_PER_WORD,
2592 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2593 - MIN (UNITS_PER_WORD,
2594 GET_MODE_SIZE (GET_MODE (memref))));
2597 start_sequence ();
2598 memref = change_address (memref, mode,
2599 plus_constant (XEXP (memref, 0), offset));
2600 insns = get_insns ();
2601 end_sequence ();
2602 emit_insns_before (insns, insn);
2604 /* Store this memory reference where
2605 we found the bit field reference. */
2607 if (destflag)
2609 validate_change (insn, &SET_DEST (body), memref, 1);
2610 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2612 rtx src = SET_SRC (body);
2613 while (GET_CODE (src) == SUBREG
2614 && SUBREG_WORD (src) == 0)
2615 src = SUBREG_REG (src);
2616 if (GET_MODE (src) != GET_MODE (memref))
2617 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2618 validate_change (insn, &SET_SRC (body), src, 1);
2620 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2621 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2622 /* This shouldn't happen because anything that didn't have
2623 one of these modes should have got converted explicitly
2624 and then referenced through a subreg.
2625 This is so because the original bit-field was
2626 handled by agg_mode and so its tree structure had
2627 the same mode that memref now has. */
2628 abort ();
2630 else
2632 rtx dest = SET_DEST (body);
2634 while (GET_CODE (dest) == SUBREG
2635 && SUBREG_WORD (dest) == 0
2636 && (GET_MODE_CLASS (GET_MODE (dest))
2637 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2638 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2639 <= UNITS_PER_WORD))
2640 dest = SUBREG_REG (dest);
2642 validate_change (insn, &SET_DEST (body), dest, 1);
2644 if (GET_MODE (dest) == GET_MODE (memref))
2645 validate_change (insn, &SET_SRC (body), memref, 1);
2646 else
2648 /* Convert the mem ref to the destination mode. */
2649 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2651 start_sequence ();
2652 convert_move (newreg, memref,
2653 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2654 seq = get_insns ();
2655 end_sequence ();
2657 validate_change (insn, &SET_SRC (body), newreg, 1);
2661 /* See if we can convert this extraction or insertion into
2662 a simple move insn. We might not be able to do so if this
2663 was, for example, part of a PARALLEL.
2665 If we succeed, write out any needed conversions. If we fail,
2666 it is hard to guess why we failed, so don't do anything
2667 special; just let the optimization be suppressed. */
2669 if (apply_change_group () && seq)
2670 emit_insns_before (seq, insn);
2675 /* These routines are responsible for converting virtual register references
2676 to the actual hard register references once RTL generation is complete.
2678 The following five variables are used for communication between the
2679 routines. They contain the offsets of the virtual registers from their
2680 respective hard registers. */
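/* As an illustrative summary (a sketch, not part of the original
   comments), the substitutions performed below are:

       virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
       virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
       virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
       virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
       virtual_cfa_rtx            -> arg_pointer_rtx   + cfa_offset

   so, for example, (plus virtual_stack_vars_rtx (const_int 8)) becomes
   (plus frame_pointer_rtx (const_int (8 + var_offset))).  */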
2682 static int in_arg_offset;
2683 static int var_offset;
2684 static int dynamic_offset;
2685 static int out_arg_offset;
2686 static int cfa_offset;
2688 /* In most machines, the stack pointer register is equivalent to the bottom
2689 of the stack. */
2691 #ifndef STACK_POINTER_OFFSET
2692 #define STACK_POINTER_OFFSET 0
2693 #endif
2695 /* If not defined, pick an appropriate default for the offset of dynamically
2696 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2697 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2699 #ifndef STACK_DYNAMIC_OFFSET
2701 #ifdef ACCUMULATE_OUTGOING_ARGS
2702 /* The bottom of the stack points to the actual arguments. If
2703 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2704 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2705 stack space for register parameters is not pushed by the caller, but
2706 rather part of the fixed stack areas and hence not included in
2707 `current_function_outgoing_args_size'. Nevertheless, we must allow
2708 for it when allocating stack dynamic objects. */
2710 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2711 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2712 (current_function_outgoing_args_size \
2713 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2715 #else
2716 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2717 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2718 #endif
2720 #else
2721 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2722 #endif
2723 #endif
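/* As a worked example of the defaults above (illustrative only): on a
   target that defines ACCUMULATE_OUTGOING_ARGS and REG_PARM_STACK_SPACE
   but not OUTGOING_REG_PARM_STACK_SPACE, with STACK_POINTER_OFFSET of 0,
   STACK_DYNAMIC_OFFSET (FNDECL) expands to

       current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)

   while without ACCUMULATE_OUTGOING_ARGS it is simply
   STACK_POINTER_OFFSET.  */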
2725 /* On a few machines, the CFA coincides with the arg pointer. */
2727 #ifndef ARG_POINTER_CFA_OFFSET
2728 #define ARG_POINTER_CFA_OFFSET 0
2729 #endif
2732 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2733 its address taken. DECL is the decl for the object stored in the
2734 register, for later use if we do need to force REG into the stack.
2735 REG is overwritten by the MEM like in put_reg_into_stack. */
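/* For instance (an illustrative sketch): if REG is pseudo 42 holding an
   SImode variable `i', the code below rewrites REG in place into

       (mem:SI (addressof:Pmode (reg:SI <new pseudo>) 42 <decl for i>))

   so every existing use of the old register now goes through the
   ADDRESSOF.  purge_addressof later either turns such references back
   into plain register references or forces the value into a real stack
   slot.  */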
2738 gen_mem_addressof (reg, decl)
2739 rtx reg;
2740 tree decl;
2742 tree type = TREE_TYPE (decl);
2743 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2744 REGNO (reg), decl);
2746 /* If the original REG was a user-variable, then so is the REG whose
2747 address is being taken. Likewise for unchanging. */
2748 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2749 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2751 PUT_CODE (reg, MEM);
2752 PUT_MODE (reg, DECL_MODE (decl));
2753 XEXP (reg, 0) = r;
2754 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2755 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2756 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2758 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2759 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2761 return reg;
2764 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2766 #if 0
2767 void
2768 flush_addressof (decl)
2769 tree decl;
2771 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2772 && DECL_RTL (decl) != 0
2773 && GET_CODE (DECL_RTL (decl)) == MEM
2774 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2775 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2776 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2778 #endif
2780 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2782 static void
2783 put_addressof_into_stack (r, ht)
2784 rtx r;
2785 struct hash_table *ht;
2787 tree decl = ADDRESSOF_DECL (r);
2788 rtx reg = XEXP (r, 0);
2790 if (GET_CODE (reg) != REG)
2791 abort ();
2793 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2794 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2795 ADDRESSOF_REGNO (r),
2796 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2799 /* List of replacements made below in purge_addressof_1 when creating
2800 bitfield insertions. */
2801 static rtx purge_bitfield_addressof_replacements;
2803 /* List of replacements made below in purge_addressof_1 for patterns
2804 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2805 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2806 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2807 enough in complex cases, e.g. when some field values can be
2808 extracted by using a MEM with a narrower mode. */
2809 static rtx purge_addressof_replacements;
2811 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2812 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2813 the stack. If the function returns FALSE then the replacement could not
2814 be made. */
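/* A sketch of the main cases handled below (illustrative, with assumed
   modes): given (mem:SI (addressof:Pmode (reg:SI 42) ...)),

   - if the MEM and the register have the same mode and the insn is still
     recognized after the change, the whole MEM is simply replaced by
     (reg:SI 42);

   - if the modes differ but the sizes permit, a bit-field extract or
     insert on the register is emitted to mimic what the memory access
     would have done;

   - otherwise the register is forced into the stack with
     put_addressof_into_stack and the ADDRESSOF becomes a real address.  */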
2816 static boolean
2817 purge_addressof_1 (loc, insn, force, store, ht)
2818 rtx *loc;
2819 rtx insn;
2820 int force, store;
2821 struct hash_table *ht;
2823 rtx x;
2824 RTX_CODE code;
2825 int i, j;
2826 const char *fmt;
2827 boolean result = true;
2829 /* Re-start here to avoid recursion in common cases. */
2830 restart:
2832 x = *loc;
2833 if (x == 0)
2834 return true;
2836 code = GET_CODE (x);
2838 /* If we don't return in any of the cases below, we will recurse inside
2839 the RTX, which will normally result in any ADDRESSOF being forced into
2840 memory. */
2841 if (code == SET)
2843 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2844 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2845 return result;
2848 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2850 /* We must create a copy of the rtx because it was created by
2851 overwriting a REG rtx which is always shared. */
2852 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2853 rtx insns;
2855 if (validate_change (insn, loc, sub, 0)
2856 || validate_replace_rtx (x, sub, insn))
2857 return true;
2859 start_sequence ();
2860 sub = force_operand (sub, NULL_RTX);
2861 if (! validate_change (insn, loc, sub, 0)
2862 && ! validate_replace_rtx (x, sub, insn))
2863 abort ();
2865 insns = gen_sequence ();
2866 end_sequence ();
2867 emit_insn_before (insns, insn);
2868 return true;
2871 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2873 rtx sub = XEXP (XEXP (x, 0), 0);
2874 rtx sub2;
2876 if (GET_CODE (sub) == MEM)
2878 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2879 MEM_COPY_ATTRIBUTES (sub2, sub);
2880 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2881 sub = sub2;
2883 else if (GET_CODE (sub) == REG
2884 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2886 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2888 int size_x, size_sub;
2890 if (!insn)
2892 /* When processing REG_NOTES look at the list of
2893 replacements done on the insn to find the register that X
2894 was replaced by. */
2895 rtx tem;
2897 for (tem = purge_bitfield_addressof_replacements;
2898 tem != NULL_RTX;
2899 tem = XEXP (XEXP (tem, 1), 1))
2900 if (rtx_equal_p (x, XEXP (tem, 0)))
2902 *loc = XEXP (XEXP (tem, 1), 0);
2903 return true;
2906 /* See comment for purge_addressof_replacements. */
2907 for (tem = purge_addressof_replacements;
2908 tem != NULL_RTX;
2909 tem = XEXP (XEXP (tem, 1), 1))
2910 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2912 rtx z = XEXP (XEXP (tem, 1), 0);
2914 if (GET_MODE (x) == GET_MODE (z)
2915 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2916 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2917 abort ();
2919 /* It can happen that the note may speak of things
2920 in a wider (or just different) mode than the
2921 code did. This is especially true of
2922 REG_RETVAL. */
2924 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2925 z = SUBREG_REG (z);
2927 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2928 && (GET_MODE_SIZE (GET_MODE (x))
2929 > GET_MODE_SIZE (GET_MODE (z))))
2931 /* This can occur as a result of invalid
2932 pointer casts, e.g. float f; ...
2933 *(long long int *)&f.
2934 ??? We could emit a warning here, but
2935 without a line number that wouldn't be
2936 very helpful. */
2937 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2939 else
2940 z = gen_lowpart (GET_MODE (x), z);
2942 *loc = z;
2943 return true;
2946 /* Sometimes we may not be able to find the replacement. For
2947 example when the original insn was a MEM in a wider mode,
2948 and the note is part of a sign extension of a narrowed
2949 version of that MEM. Gcc testcase compile/990829-1.c can
2950 generate an example of this situation. Rather than complain
2951 we return false, which will prompt our caller to remove the
2952 offending note. */
2953 return false;
2956 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2957 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2959 /* Don't even consider working with paradoxical subregs,
2960 or the moral equivalent seen here. */
2961 if (size_x <= size_sub
2962 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2964 /* Do a bitfield insertion to mirror what would happen
2965 in memory. */
2967 rtx val, seq;
2969 if (store)
2971 rtx p = PREV_INSN (insn);
2973 start_sequence ();
2974 val = gen_reg_rtx (GET_MODE (x));
2975 if (! validate_change (insn, loc, val, 0))
2977 /* Discard the current sequence and put the
2978 ADDRESSOF on stack. */
2979 end_sequence ();
2980 goto give_up;
2982 seq = gen_sequence ();
2983 end_sequence ();
2984 emit_insn_before (seq, insn);
2985 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2986 insn, ht);
2988 start_sequence ();
2989 store_bit_field (sub, size_x, 0, GET_MODE (x),
2990 val, GET_MODE_SIZE (GET_MODE (sub)),
2991 GET_MODE_SIZE (GET_MODE (sub)));
2993 /* Make sure to unshare any shared rtl that store_bit_field
2994 might have created. */
2995 unshare_all_rtl_again (get_insns ());
2997 seq = gen_sequence ();
2998 end_sequence ();
2999 p = emit_insn_after (seq, insn);
3000 if (NEXT_INSN (insn))
3001 compute_insns_for_mem (NEXT_INSN (insn),
3002 p ? NEXT_INSN (p) : NULL_RTX,
3003 ht);
3005 else
3007 rtx p = PREV_INSN (insn);
3009 start_sequence ();
3010 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3011 GET_MODE (x), GET_MODE (x),
3012 GET_MODE_SIZE (GET_MODE (sub)),
3013 GET_MODE_SIZE (GET_MODE (sub)));
3015 if (! validate_change (insn, loc, val, 0))
3017 /* Discard the current sequence and put the
3018 ADDRESSOF on stack. */
3019 end_sequence ();
3020 goto give_up;
3023 seq = gen_sequence ();
3024 end_sequence ();
3025 emit_insn_before (seq, insn);
3026 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3027 insn, ht);
3030 /* Remember the replacement so that the same one can be done
3031 on the REG_NOTES. */
3032 purge_bitfield_addressof_replacements
3033 = gen_rtx_EXPR_LIST (VOIDmode, x,
3034 gen_rtx_EXPR_LIST
3035 (VOIDmode, val,
3036 purge_bitfield_addressof_replacements));
3038 /* We replaced with a reg -- all done. */
3039 return true;
3043 else if (validate_change (insn, loc, sub, 0))
3045 /* Remember the replacement so that the same one can be done
3046 on the REG_NOTES. */
3047 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3049 rtx tem;
3051 for (tem = purge_addressof_replacements;
3052 tem != NULL_RTX;
3053 tem = XEXP (XEXP (tem, 1), 1))
3054 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3056 XEXP (XEXP (tem, 1), 0) = sub;
3057 return true;
3059 purge_addressof_replacements
3060 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3061 gen_rtx_EXPR_LIST (VOIDmode, sub,
3062 purge_addressof_replacements));
3063 return true;
3065 goto restart;
3067 give_up:;
3068 /* else give up and put it into the stack */
3071 else if (code == ADDRESSOF)
3073 put_addressof_into_stack (x, ht);
3074 return true;
3076 else if (code == SET)
3078 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3079 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3080 return result;
3083 /* Scan all subexpressions. */
3084 fmt = GET_RTX_FORMAT (code);
3085 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3087 if (*fmt == 'e')
3088 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3089 else if (*fmt == 'E')
3090 for (j = 0; j < XVECLEN (x, i); j++)
3091 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3094 return result;
3097 /* Return a new hash table entry in HT. */
3099 static struct hash_entry *
3100 insns_for_mem_newfunc (he, ht, k)
3101 struct hash_entry *he;
3102 struct hash_table *ht;
3103 hash_table_key k ATTRIBUTE_UNUSED;
3105 struct insns_for_mem_entry *ifmhe;
3106 if (he)
3107 return he;
3109 ifmhe = ((struct insns_for_mem_entry *)
3110 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3111 ifmhe->insns = NULL_RTX;
3113 return &ifmhe->he;
3116 /* Return a hash value for K, a REG. */
3118 static unsigned long
3119 insns_for_mem_hash (k)
3120 hash_table_key k;
3122 /* K is really a RTX. Just use the address as the hash value. */
3123 return (unsigned long) k;
3126 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3128 static boolean
3129 insns_for_mem_comp (k1, k2)
3130 hash_table_key k1;
3131 hash_table_key k2;
3133 return k1 == k2;
3136 struct insns_for_mem_walk_info {
3137 /* The hash table that we are using to record which INSNs use which
3138 MEMs. */
3139 struct hash_table *ht;
3141 /* The INSN we are currently processing. */
3142 rtx insn;
3144 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3145 to find the insns that use the REGs in the ADDRESSOFs. */
3146 int pass;
3149 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3150 that might be used in an ADDRESSOF expression, record this INSN in
3151 the hash table given by DATA (which is really a pointer to an
3152 insns_for_mem_walk_info structure). */
3154 static int
3155 insns_for_mem_walk (r, data)
3156 rtx *r;
3157 void *data;
3159 struct insns_for_mem_walk_info *ifmwi
3160 = (struct insns_for_mem_walk_info *) data;
3162 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3163 && GET_CODE (XEXP (*r, 0)) == REG)
3164 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3165 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3167 /* Look up this REG in the hash table; no new entry is created here. */
3168 struct insns_for_mem_entry *ifme
3169 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3170 *r,
3171 /*create=*/0,
3172 /*copy=*/0);
3174 /* If we have not already recorded this INSN, do so now. Since
3175 we process the INSNs in order, we know that if we have
3176 recorded it, it must be at the front of the list. */
3177 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3179 /* We do the allocation on the same obstack as is used for
3180 the hash table since this memory will not be used once
3181 the hash table is deallocated. */
3182 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3183 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3184 ifme->insns);
3185 pop_obstacks ();
3189 return 0;
3192 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3193 which REGs in HT. */
3195 static void
3196 compute_insns_for_mem (insns, last_insn, ht)
3197 rtx insns;
3198 rtx last_insn;
3199 struct hash_table *ht;
3201 rtx insn;
3202 struct insns_for_mem_walk_info ifmwi;
3203 ifmwi.ht = ht;
3205 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3206 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3207 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3209 ifmwi.insn = insn;
3210 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3214 /* Helper function for purge_addressof called through for_each_rtx.
3215 Returns true iff the rtl is an ADDRESSOF. */
3216 static int
3217 is_addressof (rtl, data)
3218 rtx * rtl;
3219 void * data ATTRIBUTE_UNUSED;
3221 return GET_CODE (* rtl) == ADDRESSOF;
3224 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3225 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3226 stack. */
3228 void
3229 purge_addressof (insns)
3230 rtx insns;
3232 rtx insn;
3233 struct hash_table ht;
3235 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3236 requires a fixup pass over the instruction stream to correct
3237 INSNs that depended on the REG being a REG, and not a MEM. But,
3238 these fixup passes are slow. Furthermore, most MEMs are not
3239 mentioned in very many instructions. So, we speed up the process
3240 by pre-calculating which REGs occur in which INSNs; that allows
3241 us to perform the fixup passes much more quickly. */
3242 hash_table_init (&ht,
3243 insns_for_mem_newfunc,
3244 insns_for_mem_hash,
3245 insns_for_mem_comp);
3246 compute_insns_for_mem (insns, NULL_RTX, &ht);
3248 for (insn = insns; insn; insn = NEXT_INSN (insn))
3249 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3250 || GET_CODE (insn) == CALL_INSN)
3252 if (! purge_addressof_1 (&PATTERN (insn), insn,
3253 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3254 /* If we could not replace the ADDRESSOFs in the insn,
3255 something is wrong. */
3256 abort ();
3258 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3260 /* If we could not replace the ADDRESSOFs in the insn's notes,
3261 we can just remove the offending notes instead. */
3262 rtx note;
3264 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3266 /* If we find a REG_RETVAL note then the insn is a libcall.
3267 Such insns must have REG_EQUAL notes as well, in order
3268 for later passes of the compiler to work. So it is not
3269 safe to delete the notes here, and instead we abort. */
3270 if (REG_NOTE_KIND (note) == REG_RETVAL)
3271 abort ();
3272 if (for_each_rtx (& note, is_addressof, NULL))
3273 remove_note (insn, note);
3278 /* Clean up. */
3279 hash_table_free (&ht);
3280 purge_bitfield_addressof_replacements = 0;
3281 purge_addressof_replacements = 0;
3284 /* Pass through the INSNS of function FNDECL and convert virtual register
3285 references to hard register references. */
3287 void
3288 instantiate_virtual_regs (fndecl, insns)
3289 tree fndecl;
3290 rtx insns;
3292 rtx insn;
3293 int i;
3295 /* Compute the offsets to use for this function. */
3296 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3297 var_offset = STARTING_FRAME_OFFSET;
3298 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3299 out_arg_offset = STACK_POINTER_OFFSET;
3300 cfa_offset = ARG_POINTER_CFA_OFFSET;
3302 /* Scan all variables and parameters of this function. For each that is
3303 in memory, instantiate all virtual registers if the result is a valid
3304 address. If not, we do it later. That will handle most uses of virtual
3305 regs on many machines. */
3306 instantiate_decls (fndecl, 1);
3308 /* Initialize recognition, indicating that volatile is OK. */
3309 init_recog ();
3311 /* Scan through all the insns, instantiating every virtual register still
3312 present. */
3313 for (insn = insns; insn; insn = NEXT_INSN (insn))
3314 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3315 || GET_CODE (insn) == CALL_INSN)
3317 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3318 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3321 /* Instantiate the stack slots for the parm registers, for later use in
3322 addressof elimination. */
3323 for (i = 0; i < max_parm_reg; ++i)
3324 if (parm_reg_stack_loc[i])
3325 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3327 /* Now instantiate the remaining register equivalences for debugging info.
3328 These will not be valid addresses. */
3329 instantiate_decls (fndecl, 0);
3331 /* Indicate that, from now on, assign_stack_local should use
3332 frame_pointer_rtx. */
3333 virtuals_instantiated = 1;
3336 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3337 all virtual registers in their DECL_RTL's.
3339 If VALID_ONLY, do this only if the resulting address is still valid.
3340 Otherwise, always do it. */
3342 static void
3343 instantiate_decls (fndecl, valid_only)
3344 tree fndecl;
3345 int valid_only;
3347 tree decl;
3349 if (DECL_SAVED_INSNS (fndecl))
3350 /* When compiling an inline function, the obstack used for
3351 rtl allocation is the maybepermanent_obstack. Calling
3352 `resume_temporary_allocation' switches us back to that
3353 obstack while we process this function's parameters. */
3354 resume_temporary_allocation ();
3356 /* Process all parameters of the function. */
3357 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3359 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3361 instantiate_decl (DECL_RTL (decl), size, valid_only);
3363 /* If the parameter was promoted, then the incoming RTL mode may be
3364 larger than the declared type size. We must use the larger of
3365 the two sizes. */
3366 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3367 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3370 /* Now process all variables defined in the function or its subblocks. */
3371 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3373 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3375 /* Save all rtl allocated for this function by raising the
3376 high-water mark on the maybepermanent_obstack. */
3377 preserve_data ();
3378 /* All further rtl allocation is now done in the current_obstack. */
3379 rtl_in_current_obstack ();
3383 /* Subroutine of instantiate_decls: Process all decls in the given
3384 BLOCK node and all its subblocks. */
3386 static void
3387 instantiate_decls_1 (let, valid_only)
3388 tree let;
3389 int valid_only;
3391 tree t;
3393 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3394 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3395 valid_only);
3397 /* Process all subblocks. */
3398 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3399 instantiate_decls_1 (t, valid_only);
3402 /* Subroutine of the preceding procedures: Given RTL representing a
3403 decl and the size of the object, do any instantiation required.
3405 If VALID_ONLY is non-zero, it means that the RTL should only be
3406 changed if the new address is valid. */
3408 static void
3409 instantiate_decl (x, size, valid_only)
3410 rtx x;
3411 int size;
3412 int valid_only;
3414 enum machine_mode mode;
3415 rtx addr;
3417 /* If this is not a MEM, no need to do anything. Similarly if the
3418 address is a constant or a register that is not a virtual register. */
3420 if (x == 0 || GET_CODE (x) != MEM)
3421 return;
3423 addr = XEXP (x, 0);
3424 if (CONSTANT_P (addr)
3425 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3426 || (GET_CODE (addr) == REG
3427 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3428 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3429 return;
3431 /* If we should only do this if the address is valid, copy the address.
3432 We need to do this so we can undo any changes that might make the
3433 address invalid. This copy is unfortunate, but probably can't be
3434 avoided. */
3436 if (valid_only)
3437 addr = copy_rtx (addr);
3439 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3441 if (valid_only)
3443 /* Now verify that the resulting address is valid for every integer or
3444 floating-point mode up to and including SIZE bytes long. We do this
3445 since the object might be accessed in any mode and frame addresses
3446 are shared. */
3448 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3449 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3450 mode = GET_MODE_WIDER_MODE (mode))
3451 if (! memory_address_p (mode, addr))
3452 return;
3454 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3455 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3456 mode = GET_MODE_WIDER_MODE (mode))
3457 if (! memory_address_p (mode, addr))
3458 return;
3461 /* Put back the address now that we have updated it and we either know
3462 it is valid or we don't care whether it is valid. */
3464 XEXP (x, 0) = addr;
3467 /* Given a pointer to a piece of rtx and an optional pointer to the
3468 containing object, instantiate any virtual registers present in it.
3470 If EXTRA_INSNS, we always do the replacement and generate
3471 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3472 is not valid.
3474 Return 1 if we either had nothing to do or if we were able to do the
3475 needed replacement. Return 0 otherwise; we only return zero if
3476 EXTRA_INSNS is zero.
3478 We first try some simple transformations to avoid the creation of extra
3479 pseudos. */
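/* For example (an illustrative sketch): a reference such as

       (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 4)))

   is first rewritten in place as

       (mem:SI (plus:SI (reg frame-pointer) (const_int (4 + var_offset))))

   and kept if that address is valid for OBJECT.  Only when it is not do
   we load the new constant, or the whole sum, into a fresh pseudo and
   emit the extra insns before OBJECT -- and only if EXTRA_INSNS allows
   it; otherwise we return 0.  */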
3481 static int
3482 instantiate_virtual_regs_1 (loc, object, extra_insns)
3483 rtx *loc;
3484 rtx object;
3485 int extra_insns;
3487 rtx x;
3488 RTX_CODE code;
3489 rtx new = 0;
3490 HOST_WIDE_INT offset = 0;
3491 rtx temp;
3492 rtx seq;
3493 int i, j;
3494 const char *fmt;
3496 /* Re-start here to avoid recursion in common cases. */
3497 restart:
3499 x = *loc;
3500 if (x == 0)
3501 return 1;
3503 code = GET_CODE (x);
3505 /* Check for some special cases. */
3506 switch (code)
3508 case CONST_INT:
3509 case CONST_DOUBLE:
3510 case CONST:
3511 case SYMBOL_REF:
3512 case CODE_LABEL:
3513 case PC:
3514 case CC0:
3515 case ASM_INPUT:
3516 case ADDR_VEC:
3517 case ADDR_DIFF_VEC:
3518 case RETURN:
3519 return 1;
3521 case SET:
3522 /* We are allowed to set the virtual registers. This means that
3523 the actual register should receive the source minus the
3524 appropriate offset. This is used, for example, in the handling
3525 of non-local gotos. */
3526 if (SET_DEST (x) == virtual_incoming_args_rtx)
3527 new = arg_pointer_rtx, offset = - in_arg_offset;
3528 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3529 new = frame_pointer_rtx, offset = - var_offset;
3530 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3531 new = stack_pointer_rtx, offset = - dynamic_offset;
3532 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3533 new = stack_pointer_rtx, offset = - out_arg_offset;
3534 else if (SET_DEST (x) == virtual_cfa_rtx)
3535 new = arg_pointer_rtx, offset = - cfa_offset;
3537 if (new)
3539 rtx src = SET_SRC (x);
3541 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3543 /* The only valid sources here are PLUS or REG. Just do
3544 the simplest possible thing to handle them. */
3545 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3546 abort ();
3548 start_sequence ();
3549 if (GET_CODE (src) != REG)
3550 temp = force_operand (src, NULL_RTX);
3551 else
3552 temp = src;
3553 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3554 seq = get_insns ();
3555 end_sequence ();
3557 emit_insns_before (seq, object);
3558 SET_DEST (x) = new;
3560 if (! validate_change (object, &SET_SRC (x), temp, 0)
3561 || ! extra_insns)
3562 abort ();
3564 return 1;
3567 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3568 loc = &SET_SRC (x);
3569 goto restart;
3571 case PLUS:
3572 /* Handle special case of virtual register plus constant. */
3573 if (CONSTANT_P (XEXP (x, 1)))
3575 rtx old, new_offset;
3577 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3578 if (GET_CODE (XEXP (x, 0)) == PLUS)
3580 rtx inner = XEXP (XEXP (x, 0), 0);
3582 if (inner == virtual_incoming_args_rtx)
3583 new = arg_pointer_rtx, offset = in_arg_offset;
3584 else if (inner == virtual_stack_vars_rtx)
3585 new = frame_pointer_rtx, offset = var_offset;
3586 else if (inner == virtual_stack_dynamic_rtx)
3587 new = stack_pointer_rtx, offset = dynamic_offset;
3588 else if (inner == virtual_outgoing_args_rtx)
3589 new = stack_pointer_rtx, offset = out_arg_offset;
3590 else if (inner == virtual_cfa_rtx)
3591 new = arg_pointer_rtx, offset = cfa_offset;
3592 else
3594 loc = &XEXP (x, 0);
3595 goto restart;
3598 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3599 extra_insns);
3600 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3603 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3604 new = arg_pointer_rtx, offset = in_arg_offset;
3605 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3606 new = frame_pointer_rtx, offset = var_offset;
3607 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3608 new = stack_pointer_rtx, offset = dynamic_offset;
3609 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3610 new = stack_pointer_rtx, offset = out_arg_offset;
3611 else if (XEXP (x, 0) == virtual_cfa_rtx)
3612 new = arg_pointer_rtx, offset = cfa_offset;
3613 else
3615 /* We know the second operand is a constant. Unless the
3616 first operand is a REG (which has already been checked),
3617 it needs to be checked. */
3618 if (GET_CODE (XEXP (x, 0)) != REG)
3620 loc = &XEXP (x, 0);
3621 goto restart;
3623 return 1;
3626 new_offset = plus_constant (XEXP (x, 1), offset);
3628 /* If the new constant is zero, try to replace the sum with just
3629 the register. */
3630 if (new_offset == const0_rtx
3631 && validate_change (object, loc, new, 0))
3632 return 1;
3634 /* Next try to replace the register and new offset.
3635 There are two changes to validate here and we can't assume that
3636 in the case of old offset equals new just changing the register
3637 will yield a valid insn. In the interests of a little efficiency,
3638 however, we only call validate_change once (we don't queue up the
3639 changes and then call apply_change_group). */
3641 old = XEXP (x, 0);
3642 if (offset == 0
3643 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3644 : (XEXP (x, 0) = new,
3645 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3647 if (! extra_insns)
3649 XEXP (x, 0) = old;
3650 return 0;
3653 /* Otherwise copy the new constant into a register and replace
3654 the constant with that register. */
3655 temp = gen_reg_rtx (Pmode);
3656 XEXP (x, 0) = new;
3657 if (validate_change (object, &XEXP (x, 1), temp, 0))
3658 emit_insn_before (gen_move_insn (temp, new_offset), object);
3659 else
3661 /* If that didn't work, replace this expression with a
3662 register containing the sum. */
3664 XEXP (x, 0) = old;
3665 new = gen_rtx_PLUS (Pmode, new, new_offset);
3667 start_sequence ();
3668 temp = force_operand (new, NULL_RTX);
3669 seq = get_insns ();
3670 end_sequence ();
3672 emit_insns_before (seq, object);
3673 if (! validate_change (object, loc, temp, 0)
3674 && ! validate_replace_rtx (x, temp, object))
3675 abort ();
3679 return 1;
3682 /* Fall through to generic two-operand expression case. */
3683 case EXPR_LIST:
3684 case CALL:
3685 case COMPARE:
3686 case MINUS:
3687 case MULT:
3688 case DIV: case UDIV:
3689 case MOD: case UMOD:
3690 case AND: case IOR: case XOR:
3691 case ROTATERT: case ROTATE:
3692 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3693 case NE: case EQ:
3694 case GE: case GT: case GEU: case GTU:
3695 case LE: case LT: case LEU: case LTU:
3696 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3697 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3698 loc = &XEXP (x, 0);
3699 goto restart;
3701 case MEM:
3702 /* Most cases of MEM that convert to valid addresses have already been
3703 handled by our scan of decls. The only special handling we
3704 need here is to make a copy of the rtx to ensure it isn't being
3705 shared if we have to change it to a pseudo.
3707 If the rtx is a simple reference to an address via a virtual register,
3708 it can potentially be shared. In such cases, first try to make it
3709 a valid address, which can also be shared. Otherwise, copy it and
3710 proceed normally.
3712 First check for common cases that need no processing. These are
3713 usually due to instantiation already being done on a previous instance
3714 of a shared rtx. */
3716 temp = XEXP (x, 0);
3717 if (CONSTANT_ADDRESS_P (temp)
3718 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3719 || temp == arg_pointer_rtx
3720 #endif
3721 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3722 || temp == hard_frame_pointer_rtx
3723 #endif
3724 || temp == frame_pointer_rtx)
3725 return 1;
3727 if (GET_CODE (temp) == PLUS
3728 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3729 && (XEXP (temp, 0) == frame_pointer_rtx
3730 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3731 || XEXP (temp, 0) == hard_frame_pointer_rtx
3732 #endif
3733 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3734 || XEXP (temp, 0) == arg_pointer_rtx
3735 #endif
3737 return 1;
3739 if (temp == virtual_stack_vars_rtx
3740 || temp == virtual_incoming_args_rtx
3741 || (GET_CODE (temp) == PLUS
3742 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3743 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3744 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3746 /* This MEM may be shared. If the substitution can be done without
3747 the need to generate new pseudos, we want to do it in place
3748 so all copies of the shared rtx benefit. The call below will
3749 only make substitutions if the resulting address is still
3750 valid.
3752 Note that we cannot pass X as the object in the recursive call
3753 since the insn being processed may not allow all valid
3754 addresses. However, if we were not passed an object, we can
3755 only modify X without copying it if X will have a valid
3756 address.
3758 ??? Also note that this can still lose if OBJECT is an insn that
3759 has fewer restrictions on an address than some other insn.
3760 In that case, we will modify the shared address. This case
3761 doesn't seem very likely, though. One case where this could
3762 happen is in the case of a USE or CLOBBER reference, but we
3763 take care of that below. */
3765 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3766 object ? object : x, 0))
3767 return 1;
3769 /* Otherwise make a copy and process that copy. We copy the entire
3770 RTL expression since it might be a PLUS which could also be
3771 shared. */
3772 *loc = x = copy_rtx (x);
3775 /* Fall through to generic unary operation case. */
3776 case SUBREG:
3777 case STRICT_LOW_PART:
3778 case NEG: case NOT:
3779 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3780 case SIGN_EXTEND: case ZERO_EXTEND:
3781 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3782 case FLOAT: case FIX:
3783 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3784 case ABS:
3785 case SQRT:
3786 case FFS:
3787 /* These cases either have just one operand or we know that we need not
3788 check the rest of the operands. */
3789 loc = &XEXP (x, 0);
3790 goto restart;
3792 case USE:
3793 case CLOBBER:
3794 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3795 go ahead and make the invalid one, but do it to a copy. For a REG,
3796 just make the recursive call, since there's no chance of a problem. */
3798 if ((GET_CODE (XEXP (x, 0)) == MEM
3799 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3801 || (GET_CODE (XEXP (x, 0)) == REG
3802 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3803 return 1;
3805 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3806 loc = &XEXP (x, 0);
3807 goto restart;
3809 case REG:
3810 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3811 in front of this insn and substitute the temporary. */
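	 /* For instance, a bare virtual_incoming_args_rtx becomes
	    (plus arg_pointer (const_int in_arg_offset)); if that is not valid
	    in OBJECT, the sum is forced into a fresh pseudo just before it.  */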
3812 if (x == virtual_incoming_args_rtx)
3813 new = arg_pointer_rtx, offset = in_arg_offset;
3814 else if (x == virtual_stack_vars_rtx)
3815 new = frame_pointer_rtx, offset = var_offset;
3816 else if (x == virtual_stack_dynamic_rtx)
3817 new = stack_pointer_rtx, offset = dynamic_offset;
3818 else if (x == virtual_outgoing_args_rtx)
3819 new = stack_pointer_rtx, offset = out_arg_offset;
3820 else if (x == virtual_cfa_rtx)
3821 new = arg_pointer_rtx, offset = cfa_offset;
3823 if (new)
3825 temp = plus_constant (new, offset);
3826 if (!validate_change (object, loc, temp, 0))
3828 if (! extra_insns)
3829 return 0;
3831 start_sequence ();
3832 temp = force_operand (temp, NULL_RTX);
3833 seq = get_insns ();
3834 end_sequence ();
3836 emit_insns_before (seq, object);
3837 if (! validate_change (object, loc, temp, 0)
3838 && ! validate_replace_rtx (x, temp, object))
3839 abort ();
3843 return 1;
3845 case ADDRESSOF:
3846 if (GET_CODE (XEXP (x, 0)) == REG)
3847 return 1;
3849 else if (GET_CODE (XEXP (x, 0)) == MEM)
3851 /* If we have a (addressof (mem ..)), do any instantiation inside
3852 since we know we'll be making the inside valid when we finally
3853 remove the ADDRESSOF. */
3854 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3855 return 1;
3857 break;
3859 default:
3860 break;
3863 /* Scan all subexpressions. */
3864 fmt = GET_RTX_FORMAT (code);
3865 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3866 if (*fmt == 'e')
3868 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3869 return 0;
3871 else if (*fmt == 'E')
3872 for (j = 0; j < XVECLEN (x, i); j++)
3873 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3874 extra_insns))
3875 return 0;
3877 return 1;
3880 /* Optimization: assuming this function does not receive nonlocal gotos,
3881 delete the handlers for such, as well as the insns to establish
3882 and disestablish them. */
3884 static void
3885 delete_handlers ()
3887 rtx insn;
3888 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3890 /* Delete the handler by turning off the flag that would
3891 prevent jump_optimize from deleting it.
3892 Also permit deletion of the nonlocal labels themselves
3893 if nothing local refers to them. */
3894 if (GET_CODE (insn) == CODE_LABEL)
3896 tree t, last_t;
3898 LABEL_PRESERVE_P (insn) = 0;
3900 /* Remove it from the nonlocal_label list, to avoid confusing
3901 flow. */
3902 for (t = nonlocal_labels, last_t = 0; t;
3903 last_t = t, t = TREE_CHAIN (t))
3904 if (DECL_RTL (TREE_VALUE (t)) == insn)
3905 break;
3906 if (t)
3908 if (! last_t)
3909 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3910 else
3911 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3914 if (GET_CODE (insn) == INSN)
3916 int can_delete = 0;
3917 rtx t;
3918 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3919 if (reg_mentioned_p (t, PATTERN (insn)))
3921 can_delete = 1;
3922 break;
3924 if (can_delete
3925 || (nonlocal_goto_stack_level != 0
3926 && reg_mentioned_p (nonlocal_goto_stack_level,
3927 PATTERN (insn))))
3928 delete_insn (insn);
3934 max_parm_reg_num ()
3936 return max_parm_reg;
3939 /* Return the first insn following those generated by `assign_parms'. */
3942 get_first_nonparm_insn ()
3944 if (last_parm_insn)
3945 return NEXT_INSN (last_parm_insn);
3946 return get_insns ();
3949 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3950 Crash if there is none. */
3953 get_first_block_beg ()
3955 register rtx searcher;
3956 register rtx insn = get_first_nonparm_insn ();
3958 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3959 if (GET_CODE (searcher) == NOTE
3960 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3961 return searcher;
3963 abort (); /* Invalid call to this function. (See comments above.) */
3964 return NULL_RTX;
3967 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3968 This means a type for which function calls must pass an address to the
3969 function or get an address back from the function.
3970 EXP may be a type node or an expression (whose type is tested). */
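   E.g., this returns 1 whenever RETURN_IN_MEMORY is nonzero for the type,
   the type is TREE_ADDRESSABLE, or no suitable call-clobbered registers are
   available; a scalar returned entirely in call-used registers gives 0.  */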
3973 aggregate_value_p (exp)
3974 tree exp;
3976 int i, regno, nregs;
3977 rtx reg;
3978 tree type;
3979 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3980 type = exp;
3981 else
3982 type = TREE_TYPE (exp);
3984 if (RETURN_IN_MEMORY (type))
3985 return 1;
3986 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3987 and thus can't be returned in registers. */
3988 if (TREE_ADDRESSABLE (type))
3989 return 1;
3990 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3991 return 1;
3992 /* Make sure we have suitable call-clobbered regs to return
3993 the value in; if not, we must return it in memory. */
3994 reg = hard_function_value (type, 0, 0);
3996 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3997 it is OK. */
3998 if (GET_CODE (reg) != REG)
3999 return 0;
4001 regno = REGNO (reg);
4002 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4003 for (i = 0; i < nregs; i++)
4004 if (! call_used_regs[regno + i])
4005 return 1;
4006 return 0;
4009 /* Assign RTL expressions to the function's parameters.
4010 This may involve copying them into registers and using
4011 those registers as the RTL for them. */
4013 void
4014 assign_parms (fndecl)
4015 tree fndecl;
4017 register tree parm;
4018 register rtx entry_parm = 0;
4019 register rtx stack_parm = 0;
4020 CUMULATIVE_ARGS args_so_far;
4021 enum machine_mode promoted_mode, passed_mode;
4022 enum machine_mode nominal_mode, promoted_nominal_mode;
4023 int unsignedp;
4024 /* Total space needed so far for args on the stack,
4025 given as a constant and a tree-expression. */
4026 struct args_size stack_args_size;
4027 tree fntype = TREE_TYPE (fndecl);
4028 tree fnargs = DECL_ARGUMENTS (fndecl);
4029 /* This is used for the arg pointer when referring to stack args. */
4030 rtx internal_arg_pointer;
4031 /* This is a dummy PARM_DECL that we used for the function result if
4032 the function returns a structure. */
4033 tree function_result_decl = 0;
4034 #ifdef SETUP_INCOMING_VARARGS
4035 int varargs_setup = 0;
4036 #endif
4037 rtx conversion_insns = 0;
4038 struct args_size alignment_pad;
4040 /* Nonzero if the last arg is named `__builtin_va_alist',
4041 which is used on some machines for old-fashioned non-ANSI varargs.h;
4042 this should be stuck onto the stack as if it had arrived there. */
4043 int hide_last_arg
4044 = (current_function_varargs
4045 && fnargs
4046 && (parm = tree_last (fnargs)) != 0
4047 && DECL_NAME (parm)
4048 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4049 "__builtin_va_alist")));
4051 /* Nonzero if function takes extra anonymous args.
4052 This means the last named arg must be on the stack
4053 right before the anonymous ones. */
4054 int stdarg
4055 = (TYPE_ARG_TYPES (fntype) != 0
4056 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4057 != void_type_node));
4059 current_function_stdarg = stdarg;
4061 /* If the reg that the virtual arg pointer will be translated into is
4062 not a fixed reg or is the stack pointer, make a copy of the virtual
4063 arg pointer, and address parms via the copy. The frame pointer is
4064 considered fixed even though it is not marked as such.
4066 The second time through, simply use ap to avoid generating rtx. */
4068 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4069 || ! (fixed_regs[ARG_POINTER_REGNUM]
4070 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4071 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4072 else
4073 internal_arg_pointer = virtual_incoming_args_rtx;
4074 current_function_internal_arg_pointer = internal_arg_pointer;
4076 stack_args_size.constant = 0;
4077 stack_args_size.var = 0;
4079 /* If struct value address is treated as the first argument, make it so. */
4080 if (aggregate_value_p (DECL_RESULT (fndecl))
4081 && ! current_function_returns_pcc_struct
4082 && struct_value_incoming_rtx == 0)
4084 tree type = build_pointer_type (TREE_TYPE (fntype));
4086 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4088 DECL_ARG_TYPE (function_result_decl) = type;
4089 TREE_CHAIN (function_result_decl) = fnargs;
4090 fnargs = function_result_decl;
4093 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4094 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4096 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4097 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4098 #else
4099 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4100 #endif
4102 /* We haven't yet found an argument that we must push and pretend the
4103 caller did. */
4104 current_function_pretend_args_size = 0;
4106 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4108 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4109 struct args_size stack_offset;
4110 struct args_size arg_size;
4111 int passed_pointer = 0;
4112 int did_conversion = 0;
4113 tree passed_type = DECL_ARG_TYPE (parm);
4114 tree nominal_type = TREE_TYPE (parm);
4115 int pretend_named;
4117 /* Set LAST_NAMED if this is the last named arg before some
4118 anonymous args. */
4119 int last_named = ((TREE_CHAIN (parm) == 0
4120 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4121 && (stdarg || current_function_varargs));
4122 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4123 most machines, if this is a varargs/stdarg function, then we treat
4124 the last named arg as if it were anonymous too. */
4125 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4127 if (TREE_TYPE (parm) == error_mark_node
4128 /* This can happen after weird syntax errors
4129 or if an enum type is defined among the parms. */
4130 || TREE_CODE (parm) != PARM_DECL
4131 || passed_type == NULL)
4133 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4134 = gen_rtx_MEM (BLKmode, const0_rtx);
4135 TREE_USED (parm) = 1;
4136 continue;
4139 /* For varargs.h function, save info about regs and stack space
4140 used by the individual args, not including the va_alist arg. */
4141 if (hide_last_arg && last_named)
4142 current_function_args_info = args_so_far;
4144 /* Find mode of arg as it is passed, and mode of arg
4145 as it should be during execution of this function. */
4146 passed_mode = TYPE_MODE (passed_type);
4147 nominal_mode = TYPE_MODE (nominal_type);
4149 /* If the parm's mode is VOID, its value doesn't matter,
4150 so avoid the usual things like emit_move_insn that could crash.
4151 if (nominal_mode == VOIDmode)
4153 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4154 continue;
4157 /* If the parm is to be passed as a transparent union, use the
4158 type of the first field for the tests below. We have already
4159 verified that the modes are the same. */
4160 if (DECL_TRANSPARENT_UNION (parm)
4161 || TYPE_TRANSPARENT_UNION (passed_type))
4162 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4164 /* See if this arg was passed by invisible reference. It is if
4165 it is an object whose size depends on the contents of the
4166 object itself or if the machine requires these objects be passed
4167 that way. */
4169 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4170 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4171 || TREE_ADDRESSABLE (passed_type)
4172 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4173 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4174 passed_type, named_arg)
4175 #endif
4178 passed_type = nominal_type = build_pointer_type (passed_type);
4179 passed_pointer = 1;
4180 passed_mode = nominal_mode = Pmode;
4183 promoted_mode = passed_mode;
4185 #ifdef PROMOTE_FUNCTION_ARGS
4186 /* Compute the mode to which the arg is actually extended. */
4187 unsignedp = TREE_UNSIGNED (passed_type);
4188 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4189 #endif
4191 /* Let machine desc say which reg (if any) the parm arrives in.
4192 0 means it arrives on the stack. */
4193 #ifdef FUNCTION_INCOMING_ARG
4194 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4195 passed_type, named_arg);
4196 #else
4197 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4198 passed_type, named_arg);
4199 #endif
4201 if (entry_parm == 0)
4202 promoted_mode = passed_mode;
4204 #ifdef SETUP_INCOMING_VARARGS
4205 /* If this is the last named parameter, do any required setup for
4206 varargs or stdargs. We need to know about the case of this being an
4207 addressable type, in which case we skip the registers it
4208 would have arrived in.
4210 For stdargs, LAST_NAMED will be set for two parameters, the one that
4211 is actually the last named, and the dummy parameter. We only
4212 want to do this action once.
4214 Also, indicate when RTL generation is to be suppressed. */
4215 if (last_named && !varargs_setup)
4217 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4218 current_function_pretend_args_size, 0);
4219 varargs_setup = 1;
4221 #endif
4223 /* Determine parm's home in the stack,
4224 in case it arrives in the stack or we should pretend it did.
4226 Compute the stack position and rtx where the argument arrives
4227 and its size.
4229 There is one complexity here: If this was a parameter that would
4230 have been passed in registers, but wasn't only because it is
4231 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4232 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4233 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4234 0 as it was the previous time. */
4236 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4237 locate_and_pad_parm (promoted_mode, passed_type,
4238 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4240 #else
4241 #ifdef FUNCTION_INCOMING_ARG
4242 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4243 passed_type,
4244 pretend_named) != 0,
4245 #else
4246 FUNCTION_ARG (args_so_far, promoted_mode,
4247 passed_type,
4248 pretend_named) != 0,
4249 #endif
4250 #endif
4251 fndecl, &stack_args_size, &stack_offset, &arg_size,
4252 &alignment_pad);
4255 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4257 if (offset_rtx == const0_rtx)
4258 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4259 else
4260 stack_parm = gen_rtx_MEM (promoted_mode,
4261 gen_rtx_PLUS (Pmode,
4262 internal_arg_pointer,
4263 offset_rtx));
4265 /* If this is a memory ref that contains aggregate components,
4266 mark it as such for cse and loop optimize. Likewise if it
4267 is readonly. */
4268 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4269 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4270 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4273 /* If this parameter was passed both in registers and in the stack,
4274 use the copy on the stack. */
4275 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4276 entry_parm = 0;
4278 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4279 /* If this parm was passed part in regs and part in memory,
4280 pretend it arrived entirely in memory
4281 by pushing the register-part onto the stack.
4283 In the special case of a DImode or DFmode that is split,
4284 we could put it together in a pseudoreg directly,
4285 but for now that's not worth bothering with. */
4287 if (entry_parm)
4289 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4290 passed_type, named_arg);
4292 if (nregs > 0)
4294 current_function_pretend_args_size
4295 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4296 / (PARM_BOUNDARY / BITS_PER_UNIT)
4297 * (PARM_BOUNDARY / BITS_PER_UNIT));
4299 /* Handle calls that pass values in multiple non-contiguous
4300 locations. The Irix 6 ABI has examples of this. */
4301 if (GET_CODE (entry_parm) == PARALLEL)
4302 emit_group_store (validize_mem (stack_parm), entry_parm,
4303 int_size_in_bytes (TREE_TYPE (parm)),
4304 (TYPE_ALIGN (TREE_TYPE (parm))
4305 / BITS_PER_UNIT));
4306 else
4307 move_block_from_reg (REGNO (entry_parm),
4308 validize_mem (stack_parm), nregs,
4309 int_size_in_bytes (TREE_TYPE (parm)));
4311 entry_parm = stack_parm;
4314 #endif
4316 /* If we didn't decide this parm came in a register,
4317 by default it came on the stack. */
4318 if (entry_parm == 0)
4319 entry_parm = stack_parm;
4321 /* Record permanently how this parm was passed. */
4322 DECL_INCOMING_RTL (parm) = entry_parm;
4324 /* If there is actually space on the stack for this parm,
4325 count it in stack_args_size; otherwise set stack_parm to 0
4326 to indicate there is no preallocated stack slot for the parm. */
4328 if (entry_parm == stack_parm
4329 || (GET_CODE (entry_parm) == PARALLEL
4330 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4331 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4332 /* On some machines, even if a parm value arrives in a register
4333 there is still an (uninitialized) stack slot allocated for it.
4335 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4336 whether this parameter already has a stack slot allocated,
4337 because an arg block exists only if current_function_args_size
4338 is larger than some threshold, and we haven't calculated that
4339 yet. So, for now, we just assume that stack slots never exist
4340 in this case. */
4341 || REG_PARM_STACK_SPACE (fndecl) > 0
4342 #endif
4345 stack_args_size.constant += arg_size.constant;
4346 if (arg_size.var)
4347 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4349 else
4350 /* No stack slot was pushed for this parm. */
4351 stack_parm = 0;
4353 /* Update info on where next arg arrives in registers. */
4355 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4356 passed_type, named_arg);
4358 /* If we can't trust the parm stack slot to be aligned enough
4359 for its ultimate type, don't use that slot after entry.
4360 We'll make another stack slot, if we need one. */
4362 unsigned int thisparm_boundary
4363 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4365 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4366 stack_parm = 0;
4369 /* If parm was passed in memory, and we need to convert it on entry,
4370 don't store it back in that same slot. */
4371 if (entry_parm != 0
4372 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4373 stack_parm = 0;
4375 #if 0
4376 /* Now adjust STACK_PARM to the mode and precise location
4377 where this parameter should live during execution,
4378 if we discover that it must live in the stack during execution.
4379 To make debuggers happier on big-endian machines, we store
4380 the value in the last bytes of the space available. */
4382 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4383 && stack_parm != 0)
4385 rtx offset_rtx;
4387 if (BYTES_BIG_ENDIAN
4388 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4389 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4390 - GET_MODE_SIZE (nominal_mode));
4392 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4393 if (offset_rtx == const0_rtx)
4394 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4395 else
4396 stack_parm = gen_rtx_MEM (nominal_mode,
4397 gen_rtx_PLUS (Pmode,
4398 internal_arg_pointer,
4399 offset_rtx));
4401 /* If this is a memory ref that contains aggregate components,
4402 mark it as such for cse and loop optimize. */
4403 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4405 #endif /* 0 */
4407 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4408 in the mode in which it arrives.
4409 STACK_PARM is an RTX for a stack slot where the parameter can live
4410 during the function (in case we want to put it there).
4411 STACK_PARM is 0 if no stack slot was pushed for it.
4413 Now output code if necessary to convert ENTRY_PARM to
4414 the type in which this function declares it,
4415 and store that result in an appropriate place,
4416 which may be a pseudo reg, may be STACK_PARM,
4417 or may be a local stack slot if STACK_PARM is 0.
4419 Set DECL_RTL to that place. */
4421 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4423 /* If a BLKmode arrives in registers, copy it to a stack slot.
4424 Handle calls that pass values in multiple non-contiguous
4425 locations. The Irix 6 ABI has examples of this. */
4426 if (GET_CODE (entry_parm) == REG
4427 || GET_CODE (entry_parm) == PARALLEL)
4429 int size_stored
4430 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4431 UNITS_PER_WORD);
4433 /* Note that we will be storing an integral number of words.
4434 So we have to be careful to ensure that we allocate an
4435 integral number of words. We do this below in the
4436 assign_stack_local if space was not allocated in the argument
4437 list. If it was, this will not work if PARM_BOUNDARY is not
4438 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4439 if it becomes a problem. */
4441 if (stack_parm == 0)
4443 stack_parm
4444 = assign_stack_local (GET_MODE (entry_parm),
4445 size_stored, 0);
4447 /* If this is a memory ref that contains aggregate
4448 components, mark it as such for cse and loop optimize. */
4449 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4452 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4453 abort ();
4455 if (TREE_READONLY (parm))
4456 RTX_UNCHANGING_P (stack_parm) = 1;
4458 /* Handle calls that pass values in multiple non-contiguous
4459 locations. The Irix 6 ABI has examples of this. */
4460 if (GET_CODE (entry_parm) == PARALLEL)
4461 emit_group_store (validize_mem (stack_parm), entry_parm,
4462 int_size_in_bytes (TREE_TYPE (parm)),
4463 (TYPE_ALIGN (TREE_TYPE (parm))
4464 / BITS_PER_UNIT));
4465 else
4466 move_block_from_reg (REGNO (entry_parm),
4467 validize_mem (stack_parm),
4468 size_stored / UNITS_PER_WORD,
4469 int_size_in_bytes (TREE_TYPE (parm)));
4471 DECL_RTL (parm) = stack_parm;
4473 else if (! ((! optimize
4474 && ! DECL_REGISTER (parm)
4475 && ! DECL_INLINE (fndecl))
4476 /* layout_decl may set this. */
4477 || TREE_ADDRESSABLE (parm)
4478 || TREE_SIDE_EFFECTS (parm)
4479 /* If -ffloat-store specified, don't put explicit
4480 float variables into registers. */
4481 || (flag_float_store
4482 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4483 /* Always assign pseudo to structure return or item passed
4484 by invisible reference. */
4485 || passed_pointer || parm == function_result_decl)
4487 /* Store the parm in a pseudoregister during the function, but we
4488 may need to do it in a wider mode. */
4490 register rtx parmreg;
4491 int regno, regnoi = 0, regnor = 0;
4493 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4495 promoted_nominal_mode
4496 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4498 parmreg = gen_reg_rtx (promoted_nominal_mode);
4499 mark_user_reg (parmreg);
4501 /* If this was an item that we received a pointer to, set DECL_RTL
4502 appropriately. */
4503 if (passed_pointer)
4505 DECL_RTL (parm)
4506 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4507 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4509 else
4510 DECL_RTL (parm) = parmreg;
4512 /* Copy the value into the register. */
4513 if (nominal_mode != passed_mode
4514 || promoted_nominal_mode != promoted_mode)
4516 int save_tree_used;
4517 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4518 mode, by the caller. We now have to convert it to
4519 NOMINAL_MODE, if different. However, PARMREG may be in
4520 a different mode than NOMINAL_MODE if it is being stored
4521 promoted.
4523 If ENTRY_PARM is a hard register, it might be in a register
4524 not valid for operating in its mode (e.g., an odd-numbered
4525 register for a DFmode). In that case, moves are the only
4526 thing valid, so we can't do a convert from there. This
4527 occurs when the calling sequence allows such misaligned
4528 usages.
4530 In addition, the conversion may involve a call, which could
4531 clobber parameters which haven't been copied to pseudo
4532 registers yet. Therefore, we must first copy the parm to
4533 a pseudo reg here, and save the conversion until after all
4534 parameters have been moved. */
4536 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4538 emit_move_insn (tempreg, validize_mem (entry_parm));
4540 push_to_sequence (conversion_insns);
4541 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4543 /* TREE_USED gets set erroneously during expand_assignment. */
4544 save_tree_used = TREE_USED (parm);
4545 expand_assignment (parm,
4546 make_tree (nominal_type, tempreg), 0, 0);
4547 TREE_USED (parm) = save_tree_used;
4548 conversion_insns = get_insns ();
4549 did_conversion = 1;
4550 end_sequence ();
4552 else
4553 emit_move_insn (parmreg, validize_mem (entry_parm));
4555 /* If we were passed a pointer but the actual value
4556 can safely live in a register, put it in one. */
4557 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4558 && ! ((! optimize
4559 && ! DECL_REGISTER (parm)
4560 && ! DECL_INLINE (fndecl))
4561 /* layout_decl may set this. */
4562 || TREE_ADDRESSABLE (parm)
4563 || TREE_SIDE_EFFECTS (parm)
4564 /* If -ffloat-store specified, don't put explicit
4565 float variables into registers. */
4566 || (flag_float_store
4567 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4569 /* We can't use nominal_mode, because it will have been set to
4570 Pmode above. We must use the actual mode of the parm. */
4571 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4572 mark_user_reg (parmreg);
4573 emit_move_insn (parmreg, DECL_RTL (parm));
4574 DECL_RTL (parm) = parmreg;
4575 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4576 now the parm. */
4577 stack_parm = 0;
4579 #ifdef FUNCTION_ARG_CALLEE_COPIES
4580 /* If we are passed an arg by reference and it is our responsibility
4581 to make a copy, do it now.
4582 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4583 original argument, so we must recreate them in the call to
4584 FUNCTION_ARG_CALLEE_COPIES. */
4585 /* ??? Later add code to skip the copy when the argument isn't
4586 modified. */
4588 else if (passed_pointer
4589 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4590 TYPE_MODE (DECL_ARG_TYPE (parm)),
4591 DECL_ARG_TYPE (parm),
4592 named_arg)
4593 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4595 rtx copy;
4596 tree type = DECL_ARG_TYPE (parm);
4598 /* This sequence may involve a library call perhaps clobbering
4599 registers that haven't been copied to pseudos yet. */
4601 push_to_sequence (conversion_insns);
4603 if (TYPE_SIZE (type) == 0
4604 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4605 /* This is a variable sized object. */
4606 copy = gen_rtx_MEM (BLKmode,
4607 allocate_dynamic_stack_space
4608 (expr_size (parm), NULL_RTX,
4609 TYPE_ALIGN (type)));
4610 else
4611 copy = assign_stack_temp (TYPE_MODE (type),
4612 int_size_in_bytes (type), 1);
4613 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4614 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4616 store_expr (parm, copy, 0);
4617 emit_move_insn (parmreg, XEXP (copy, 0));
4618 if (current_function_check_memory_usage)
4619 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4620 XEXP (copy, 0), Pmode,
4621 GEN_INT (int_size_in_bytes (type)),
4622 TYPE_MODE (sizetype),
4623 GEN_INT (MEMORY_USE_RW),
4624 TYPE_MODE (integer_type_node));
4625 conversion_insns = get_insns ();
4626 did_conversion = 1;
4627 end_sequence ();
4629 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4631 /* In any case, record the parm's desired stack location
4632 in case we later discover it must live in the stack.
4634 If it is a COMPLEX value, store the stack location for both
4635 halves. */
4637 if (GET_CODE (parmreg) == CONCAT)
4638 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4639 else
4640 regno = REGNO (parmreg);
4642 if (regno >= max_parm_reg)
4644 rtx *new;
4645 int old_max_parm_reg = max_parm_reg;
4647 /* It's slow to expand this one register at a time,
4648 but it's also rare and we need max_parm_reg to be
4649 precisely correct. */
4650 max_parm_reg = regno + 1;
4651 new = (rtx *) xrealloc (parm_reg_stack_loc,
4652 max_parm_reg * sizeof (rtx));
4653 bzero ((char *) (new + old_max_parm_reg),
4654 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4655 parm_reg_stack_loc = new;
4658 if (GET_CODE (parmreg) == CONCAT)
4660 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4662 regnor = REGNO (gen_realpart (submode, parmreg));
4663 regnoi = REGNO (gen_imagpart (submode, parmreg));
4665 if (stack_parm != 0)
4667 parm_reg_stack_loc[regnor]
4668 = gen_realpart (submode, stack_parm);
4669 parm_reg_stack_loc[regnoi]
4670 = gen_imagpart (submode, stack_parm);
4672 else
4674 parm_reg_stack_loc[regnor] = 0;
4675 parm_reg_stack_loc[regnoi] = 0;
4678 else
4679 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4681 /* Mark the register as eliminable if we did no conversion
4682 and it was copied from memory at a fixed offset,
4683 and the arg pointer was not copied to a pseudo-reg.
4684 If the arg pointer is a pseudo reg or the offset formed
4685 an invalid address, such memory-equivalences
4686 as we make here would screw up life analysis for it. */
4687 if (nominal_mode == passed_mode
4688 && ! did_conversion
4689 && stack_parm != 0
4690 && GET_CODE (stack_parm) == MEM
4691 && stack_offset.var == 0
4692 && reg_mentioned_p (virtual_incoming_args_rtx,
4693 XEXP (stack_parm, 0)))
4695 rtx linsn = get_last_insn ();
4696 rtx sinsn, set;
4698 /* Mark complex types separately. */
4699 if (GET_CODE (parmreg) == CONCAT)
4700 /* Scan backwards for the set of the real and
4701 imaginary parts. */
4702 for (sinsn = linsn; sinsn != 0;
4703 sinsn = prev_nonnote_insn (sinsn))
4705 set = single_set (sinsn);
4706 if (set != 0
4707 && SET_DEST (set) == regno_reg_rtx [regnoi])
4708 REG_NOTES (sinsn)
4709 = gen_rtx_EXPR_LIST (REG_EQUIV,
4710 parm_reg_stack_loc[regnoi],
4711 REG_NOTES (sinsn));
4712 else if (set != 0
4713 && SET_DEST (set) == regno_reg_rtx [regnor])
4714 REG_NOTES (sinsn)
4715 = gen_rtx_EXPR_LIST (REG_EQUIV,
4716 parm_reg_stack_loc[regnor],
4717 REG_NOTES (sinsn));
4719 else if ((set = single_set (linsn)) != 0
4720 && SET_DEST (set) == parmreg)
4721 REG_NOTES (linsn)
4722 = gen_rtx_EXPR_LIST (REG_EQUIV,
4723 stack_parm, REG_NOTES (linsn));
4726 /* For pointer data type, suggest pointer register. */
4727 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4728 mark_reg_pointer (parmreg,
4729 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4730 / BITS_PER_UNIT));
4732 else
4734 /* Value must be stored in the stack slot STACK_PARM
4735 during function execution. */
4737 if (promoted_mode != nominal_mode)
4739 /* Conversion is required. */
4740 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4742 emit_move_insn (tempreg, validize_mem (entry_parm));
4744 push_to_sequence (conversion_insns);
4745 entry_parm = convert_to_mode (nominal_mode, tempreg,
4746 TREE_UNSIGNED (TREE_TYPE (parm)));
4747 if (stack_parm)
4749 /* ??? This may need a big-endian conversion on sparc64. */
4750 stack_parm = change_address (stack_parm, nominal_mode,
4751 NULL_RTX);
4753 conversion_insns = get_insns ();
4754 did_conversion = 1;
4755 end_sequence ();
4758 if (entry_parm != stack_parm)
4760 if (stack_parm == 0)
4762 stack_parm
4763 = assign_stack_local (GET_MODE (entry_parm),
4764 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4765 /* If this is a memory ref that contains aggregate components,
4766 mark it as such for cse and loop optimize. */
4767 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4770 if (promoted_mode != nominal_mode)
4772 push_to_sequence (conversion_insns);
4773 emit_move_insn (validize_mem (stack_parm),
4774 validize_mem (entry_parm));
4775 conversion_insns = get_insns ();
4776 end_sequence ();
4778 else
4779 emit_move_insn (validize_mem (stack_parm),
4780 validize_mem (entry_parm));
4782 if (current_function_check_memory_usage)
4784 push_to_sequence (conversion_insns);
4785 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4786 XEXP (stack_parm, 0), Pmode,
4787 GEN_INT (GET_MODE_SIZE (GET_MODE
4788 (entry_parm))),
4789 TYPE_MODE (sizetype),
4790 GEN_INT (MEMORY_USE_RW),
4791 TYPE_MODE (integer_type_node));
4793 conversion_insns = get_insns ();
4794 end_sequence ();
4796 DECL_RTL (parm) = stack_parm;
4799 /* If this "parameter" was the place where we are receiving the
4800 function's incoming structure pointer, set up the result. */
4801 if (parm == function_result_decl)
4803 tree result = DECL_RESULT (fndecl);
4804 tree restype = TREE_TYPE (result);
4806 DECL_RTL (result)
4807 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4809 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4810 AGGREGATE_TYPE_P (restype));
4813 if (TREE_THIS_VOLATILE (parm))
4814 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4815 if (TREE_READONLY (parm))
4816 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4819 /* Output all parameter conversion instructions (possibly including calls)
4820 now that all parameters have been copied out of hard registers. */
4821 emit_insns (conversion_insns);
4823 last_parm_insn = get_last_insn ();
4825 current_function_args_size = stack_args_size.constant;
4827 /* Adjust function incoming argument size for alignment and
4828 minimum length. */
4830 #ifdef REG_PARM_STACK_SPACE
4831 #ifndef MAYBE_REG_PARM_STACK_SPACE
4832 current_function_args_size = MAX (current_function_args_size,
4833 REG_PARM_STACK_SPACE (fndecl));
4834 #endif
4835 #endif
4837 #ifdef STACK_BOUNDARY
4838 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4840 current_function_args_size
4841 = ((current_function_args_size + STACK_BYTES - 1)
4842 / STACK_BYTES) * STACK_BYTES;
4843 #endif
4845 #ifdef ARGS_GROW_DOWNWARD
4846 current_function_arg_offset_rtx
4847 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4848 : expand_expr (size_diffop (stack_args_size.var,
4849 size_int (-stack_args_size.constant)),
4850 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4851 #else
4852 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4853 #endif
4855 /* See how many bytes, if any, of its args a function should try to pop
4856 on return. */
4858 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4859 current_function_args_size);
4861 /* For stdarg.h function, save info about
4862 regs and stack space used by the named args. */
4864 if (!hide_last_arg)
4865 current_function_args_info = args_so_far;
4867 /* Set the rtx used for the function return value. Put this in its
4868 own variable so any optimizers that need this information don't have
4869 to include tree.h. Do this here so it gets done when an inlined
4870 function gets output. */
4872 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4875 /* Indicate whether REGNO is an incoming argument to the current function
4876 that was promoted to a wider mode. If so, return the RTX for the
4877 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4878 that REGNO is promoted from and whether the promotion was signed or
4879 unsigned. */
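/* As a hypothetical illustration: on a target whose PROMOTE_FUNCTION_ARGS
   widens QImode parameters to SImode, an incoming char argument in a hard
   register makes this return its SImode incoming rtx, with *PMODE set to
   QImode and *PUNSIGNEDP giving the signedness used for the promotion.  */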
4881 #ifdef PROMOTE_FUNCTION_ARGS
4884 promoted_input_arg (regno, pmode, punsignedp)
4885 int regno;
4886 enum machine_mode *pmode;
4887 int *punsignedp;
4889 tree arg;
4891 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4892 arg = TREE_CHAIN (arg))
4893 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4894 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4895 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4897 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4898 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4900 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4901 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4902 && mode != DECL_MODE (arg))
4904 *pmode = DECL_MODE (arg);
4905 *punsignedp = unsignedp;
4906 return DECL_INCOMING_RTL (arg);
4910 return 0;
4913 #endif
4915 /* Compute the size and offset from the start of the stacked arguments for a
4916 parm passed in mode PASSED_MODE and with type TYPE.
4918 INITIAL_OFFSET_PTR points to the current offset into the stacked
4919 arguments.
4921 The starting offset and size for this parm are returned in *OFFSET_PTR
4922 and *ARG_SIZE_PTR, respectively.
4924 IN_REGS is non-zero if the argument will be passed in registers. It will
4925 never be set if REG_PARM_STACK_SPACE is not defined.
4927 FNDECL is the function in which the argument was defined.
4929 There are two types of rounding that are done. The first, controlled by
4930 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4931 list to be aligned to the specific boundary (in bits). This rounding
4932 affects the initial and starting offsets, but not the argument size.
4934 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4935 optionally rounds the size of the parm to PARM_BOUNDARY. The
4936 initial offset is not affected by this rounding, while the size always
4937 is and the starting offset may be. */
4939 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4940 initial_offset_ptr is positive because locate_and_pad_parm's
4941 callers pass in the total size of args so far as
4942 initial_offset_ptr. arg_size_ptr is always positive. */
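/* A sketch with hypothetical boundaries: if FUNCTION_ARG_BOUNDARY is 64 bits
   and the args so far occupy 4 bytes, the starting offset is first rounded up
   to 8; a 4-byte int with a 32-bit PARM_BOUNDARY then needs no size rounding,
   so *ARG_SIZE_PTR stays 4 (the non-ARGS_GROW_DOWNWARD case).  */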
4944 void
4945 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4946 initial_offset_ptr, offset_ptr, arg_size_ptr,
4947 alignment_pad)
4948 enum machine_mode passed_mode;
4949 tree type;
4950 int in_regs ATTRIBUTE_UNUSED;
4951 tree fndecl ATTRIBUTE_UNUSED;
4952 struct args_size *initial_offset_ptr;
4953 struct args_size *offset_ptr;
4954 struct args_size *arg_size_ptr;
4955 struct args_size *alignment_pad;
4958 tree sizetree
4959 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4960 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4961 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4963 #ifdef REG_PARM_STACK_SPACE
4964 /* If we have found a stack parm before we reach the end of the
4965 area reserved for registers, skip that area. */
4966 if (! in_regs)
4968 int reg_parm_stack_space = 0;
4970 #ifdef MAYBE_REG_PARM_STACK_SPACE
4971 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4972 #else
4973 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4974 #endif
4975 if (reg_parm_stack_space > 0)
4977 if (initial_offset_ptr->var)
4979 initial_offset_ptr->var
4980 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4981 ssize_int (reg_parm_stack_space));
4982 initial_offset_ptr->constant = 0;
4984 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4985 initial_offset_ptr->constant = reg_parm_stack_space;
4988 #endif /* REG_PARM_STACK_SPACE */
4990 arg_size_ptr->var = 0;
4991 arg_size_ptr->constant = 0;
4993 #ifdef ARGS_GROW_DOWNWARD
4994 if (initial_offset_ptr->var)
4996 offset_ptr->constant = 0;
4997 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
4998 initial_offset_ptr->var);
5000 else
5002 offset_ptr->constant = - initial_offset_ptr->constant;
5003 offset_ptr->var = 0;
5005 if (where_pad != none
5006 && (TREE_CODE (sizetree) != INTEGER_CST
5007 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5008 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5009 SUB_PARM_SIZE (*offset_ptr, sizetree);
5010 if (where_pad != downward)
5011 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5012 if (initial_offset_ptr->var)
5013 arg_size_ptr->var = size_binop (MINUS_EXPR,
5014 size_binop (MINUS_EXPR,
5015 ssize_int (0),
5016 initial_offset_ptr->var),
5017 offset_ptr->var);
5019 else
5020 arg_size_ptr->constant = (- initial_offset_ptr->constant
5021 - offset_ptr->constant);
5023 #else /* !ARGS_GROW_DOWNWARD */
5024 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5025 *offset_ptr = *initial_offset_ptr;
5027 #ifdef PUSH_ROUNDING
5028 if (passed_mode != BLKmode)
5029 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5030 #endif
5032 /* Pad_below needs the pre-rounded size to know how much to pad below
5033 so this must be done before rounding up. */
5034 if (where_pad == downward
5035 /* However, BLKmode args passed in regs have their padding done elsewhere.
5036 The stack slot must be able to hold the entire register. */
5037 && !(in_regs && passed_mode == BLKmode))
5038 pad_below (offset_ptr, passed_mode, sizetree);
5040 if (where_pad != none
5041 && (TREE_CODE (sizetree) != INTEGER_CST
5042 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5043 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5045 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5046 #endif /* ARGS_GROW_DOWNWARD */
5049 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5050 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
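/* E.g., an offset constant of 6 with a 32-bit boundary is CEIL_ROUNDed up
   to 8 when arguments grow upward; with ARGS_GROW_DOWNWARD the (negative)
   offset is FLOOR_ROUNDed toward more negative values instead.  */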
5052 static void
5053 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5054 struct args_size *offset_ptr;
5055 int boundary;
5056 struct args_size *alignment_pad;
5058 tree save_var = NULL_TREE;
5059 HOST_WIDE_INT save_constant = 0;
5061 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5063 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5065 save_var = offset_ptr->var;
5066 save_constant = offset_ptr->constant;
5069 alignment_pad->var = NULL_TREE;
5070 alignment_pad->constant = 0;
5072 if (boundary > BITS_PER_UNIT)
5074 if (offset_ptr->var)
5076 offset_ptr->var =
5077 #ifdef ARGS_GROW_DOWNWARD
5078 round_down
5079 #else
5080 round_up
5081 #endif
5082 (ARGS_SIZE_TREE (*offset_ptr),
5083 boundary / BITS_PER_UNIT);
5084 offset_ptr->constant = 0; /*?*/
5085 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5086 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5087 save_var);
5089 else
5091 offset_ptr->constant =
5092 #ifdef ARGS_GROW_DOWNWARD
5093 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5094 #else
5095 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5096 #endif
5097 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5098 alignment_pad->constant = offset_ptr->constant - save_constant;
5103 #ifndef ARGS_GROW_DOWNWARD
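/* Advance *OFFSET_PTR past the padding that lies below an argument whose
   size is not a multiple of PARM_BOUNDARY, so that the value itself ends up
   at the top of its padded stack slot.  */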
5104 static void
5105 pad_below (offset_ptr, passed_mode, sizetree)
5106 struct args_size *offset_ptr;
5107 enum machine_mode passed_mode;
5108 tree sizetree;
5110 if (passed_mode != BLKmode)
5112 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5113 offset_ptr->constant
5114 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5115 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5116 - GET_MODE_SIZE (passed_mode));
5118 else
5120 if (TREE_CODE (sizetree) != INTEGER_CST
5121 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5123 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5124 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5125 /* Add it in. */
5126 ADD_PARM_SIZE (*offset_ptr, s2);
5127 SUB_PARM_SIZE (*offset_ptr, sizetree);
5131 #endif
5133 /* Walk the tree of blocks describing the binding levels within a function
5134 and warn about uninitialized variables.
5135 This is done after calling flow_analysis and before global_alloc
5136 clobbers the pseudo-regs to hard regs. */
5138 void
5139 uninitialized_vars_warning (block)
5140 tree block;
5142 register tree decl, sub;
5143 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5145 if (warn_uninitialized
5146 && TREE_CODE (decl) == VAR_DECL
5147 /* These warnings are unreliable for aggregates
5148 because assigning the fields one by one can fail to convince
5149 flow.c that the entire aggregate was initialized.
5150 Unions are troublesome because members may be shorter. */
5151 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5152 && DECL_RTL (decl) != 0
5153 && GET_CODE (DECL_RTL (decl)) == REG
5154 /* Global optimizations can make it difficult to determine if a
5155 particular variable has been initialized. However, a VAR_DECL
5156 with a nonzero DECL_INITIAL had an initializer, so do not
5157 claim it is potentially uninitialized.
5159 We do not care about the actual value in DECL_INITIAL, so we do
5160 not worry that it may be a dangling pointer. */
5161 && DECL_INITIAL (decl) == NULL_TREE
5162 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5163 warning_with_decl (decl,
5164 "`%s' might be used uninitialized in this function");
5165 if (extra_warnings
5166 && TREE_CODE (decl) == VAR_DECL
5167 && DECL_RTL (decl) != 0
5168 && GET_CODE (DECL_RTL (decl)) == REG
5169 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5170 warning_with_decl (decl,
5171 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5173 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5174 uninitialized_vars_warning (sub);
5177 /* Do the appropriate part of uninitialized_vars_warning
5178 but for arguments instead of local variables. */
5180 void
5181 setjmp_args_warning ()
5183 register tree decl;
5184 for (decl = DECL_ARGUMENTS (current_function_decl);
5185 decl; decl = TREE_CHAIN (decl))
5186 if (DECL_RTL (decl) != 0
5187 && GET_CODE (DECL_RTL (decl)) == REG
5188 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5189 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5192 /* If this function calls setjmp, put all vars into the stack
5193 unless they were declared `register'. */
5195 void
5196 setjmp_protect (block)
5197 tree block;
5199 register tree decl, sub;
5200 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5201 if ((TREE_CODE (decl) == VAR_DECL
5202 || TREE_CODE (decl) == PARM_DECL)
5203 && DECL_RTL (decl) != 0
5204 && (GET_CODE (DECL_RTL (decl)) == REG
5205 || (GET_CODE (DECL_RTL (decl)) == MEM
5206 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5207 /* If this variable came from an inline function, it must be
5208 that its life doesn't overlap the setjmp. If there was a
5209 setjmp in the function, it would already be in memory. We
5210 must exclude such variables because their DECL_RTL might be
5211 set to strange things such as virtual_stack_vars_rtx. */
5212 && ! DECL_FROM_INLINE (decl)
5213 && (
5214 #ifdef NON_SAVING_SETJMP
5215 /* If longjmp doesn't restore the registers,
5216 don't put anything in them. */
5217 NON_SAVING_SETJMP
5219 #endif
5220 ! DECL_REGISTER (decl)))
5221 put_var_into_stack (decl);
5222 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5223 setjmp_protect (sub);
5226 /* Like the previous function, but for args instead of local variables. */
5228 void
5229 setjmp_protect_args ()
5231 register tree decl;
5232 for (decl = DECL_ARGUMENTS (current_function_decl);
5233 decl; decl = TREE_CHAIN (decl))
5234 if ((TREE_CODE (decl) == VAR_DECL
5235 || TREE_CODE (decl) == PARM_DECL)
5236 && DECL_RTL (decl) != 0
5237 && (GET_CODE (DECL_RTL (decl)) == REG
5238 || (GET_CODE (DECL_RTL (decl)) == MEM
5239 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5240 && (
5241 /* If longjmp doesn't restore the registers,
5242 don't put anything in them. */
5243 #ifdef NON_SAVING_SETJMP
5244 NON_SAVING_SETJMP
5246 #endif
5247 ! DECL_REGISTER (decl)))
5248 put_var_into_stack (decl);
5251 /* Return the context-pointer register corresponding to DECL,
5252 or 0 if it does not need one. */
5255 lookup_static_chain (decl)
5256 tree decl;
5258 tree context = decl_function_context (decl);
5259 tree link;
5261 if (context == 0
5262 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5263 return 0;
5265 /* We treat inline_function_decl as an alias for the current function
5266 because that is the inline function whose vars, types, etc.
5267 are being merged into the current function.
5268 See expand_inline_function. */
5269 if (context == current_function_decl || context == inline_function_decl)
5270 return virtual_stack_vars_rtx;
5272 for (link = context_display; link; link = TREE_CHAIN (link))
5273 if (TREE_PURPOSE (link) == context)
5274 return RTL_EXPR_RTL (TREE_VALUE (link));
5276 abort ();
5279 /* Convert a stack slot address ADDR for variable VAR
5280 (from a containing function)
5281 into an address valid in this function (using a static chain). */
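/* For example (offset hypothetical): a parent-frame slot addressed as
   (plus virtual_stack_vars_rtx (const_int -16)) is rewritten here as the
   parent's static chain value plus -16 when used from the nested function.  */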
5284 fix_lexical_addr (addr, var)
5285 rtx addr;
5286 tree var;
5288 rtx basereg;
5289 HOST_WIDE_INT displacement;
5290 tree context = decl_function_context (var);
5291 struct function *fp;
5292 rtx base = 0;
5294 /* If this is the present function, we need not do anything. */
5295 if (context == current_function_decl || context == inline_function_decl)
5296 return addr;
5298 for (fp = outer_function_chain; fp; fp = fp->next)
5299 if (fp->decl == context)
5300 break;
5302 if (fp == 0)
5303 abort ();
5305 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5306 addr = XEXP (XEXP (addr, 0), 0);
5308 /* Decode given address as base reg plus displacement. */
5309 if (GET_CODE (addr) == REG)
5310 basereg = addr, displacement = 0;
5311 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5312 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5313 else
5314 abort ();
5316 /* We accept vars reached via the containing function's
5317 incoming arg pointer and via its stack variables pointer. */
5318 if (basereg == fp->internal_arg_pointer)
5320 /* If reached via arg pointer, get the arg pointer value
5321 out of that function's stack frame.
5323 There are two cases: If a separate ap is needed, allocate a
5324 slot in the outer function for it and dereference it that way.
5325 This is correct even if the real ap is actually a pseudo.
5326 Otherwise, just adjust the offset from the frame pointer to
5327 compensate. */
5329 #ifdef NEED_SEPARATE_AP
5330 rtx addr;
5332 if (fp->x_arg_pointer_save_area == 0)
5333 fp->x_arg_pointer_save_area
5334 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5336 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5337 addr = memory_address (Pmode, addr);
5339 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5340 #else
5341 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5342 base = lookup_static_chain (var);
5343 #endif
5346 else if (basereg == virtual_stack_vars_rtx)
5348 /* This is the same code as lookup_static_chain, duplicated here to
5349 avoid an extra call to decl_function_context. */
5350 tree link;
5352 for (link = context_display; link; link = TREE_CHAIN (link))
5353 if (TREE_PURPOSE (link) == context)
5355 base = RTL_EXPR_RTL (TREE_VALUE (link));
5356 break;
5360 if (base == 0)
5361 abort ();
5363 /* Use same offset, relative to appropriate static chain or argument
5364 pointer. */
5365 return plus_constant (base, displacement);
5368 /* Return the address of the trampoline for entering nested fn FUNCTION.
5369 If necessary, allocate a trampoline (in the stack frame)
5370 and emit rtl to initialize its contents (at entry to this function). */
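/* The first call for a given FUNCTION allocates space for the trampoline
   (TRAMPOLINE_REAL_SIZE bytes, or whatever ALLOCATE_TRAMPOLINE provides),
   usually in the defining function's frame, and records it on that
   function's trampoline list; later calls simply return the rounded
   address of the existing slot.  */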
5373 trampoline_address (function)
5374 tree function;
5376 tree link;
5377 tree rtlexp;
5378 rtx tramp;
5379 struct function *fp;
5380 tree fn_context;
5382 /* Find an existing trampoline and return it. */
5383 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5384 if (TREE_PURPOSE (link) == function)
5385 return
5386 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5388 for (fp = outer_function_chain; fp; fp = fp->next)
5389 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5390 if (TREE_PURPOSE (link) == function)
5392 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5393 function);
5394 return round_trampoline_addr (tramp);
5397 /* None exists; we must make one. */
5399 /* Find the `struct function' for the function containing FUNCTION. */
5400 fp = 0;
5401 fn_context = decl_function_context (function);
5402 if (fn_context != current_function_decl
5403 && fn_context != inline_function_decl)
5404 for (fp = outer_function_chain; fp; fp = fp->next)
5405 if (fp->decl == fn_context)
5406 break;
5408 /* Allocate run-time space for this trampoline
5409 (usually in the defining function's stack frame). */
5410 #ifdef ALLOCATE_TRAMPOLINE
5411 tramp = ALLOCATE_TRAMPOLINE (fp);
5412 #else
5413 /* If rounding needed, allocate extra space
5414 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5415 #ifdef TRAMPOLINE_ALIGNMENT
5416 #define TRAMPOLINE_REAL_SIZE \
5417 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5418 #else
5419 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5420 #endif
5421 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5422 fp ? fp : cfun);
5423 #endif
5425 /* Record the trampoline for reuse and note it for later initialization
5426 by expand_function_end. */
5427 if (fp != 0)
5429 push_obstacks (fp->function_maybepermanent_obstack,
5430 fp->function_maybepermanent_obstack);
5431 rtlexp = make_node (RTL_EXPR);
5432 RTL_EXPR_RTL (rtlexp) = tramp;
5433 fp->x_trampoline_list = tree_cons (function, rtlexp,
5434 fp->x_trampoline_list);
5435 pop_obstacks ();
5437 else
5439 /* Make the RTL_EXPR node temporary, not momentary, so that the
5440 trampoline_list doesn't become garbage. */
5441 int momentary = suspend_momentary ();
5442 rtlexp = make_node (RTL_EXPR);
5443 resume_momentary (momentary);
5445 RTL_EXPR_RTL (rtlexp) = tramp;
5446 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5449 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5450 return round_trampoline_addr (tramp);
5453 /* Given a trampoline address,
5454 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5456 static rtx
5457 round_trampoline_addr (tramp)
5458 rtx tramp;
5460 #ifdef TRAMPOLINE_ALIGNMENT
5461 /* Round address up to desired boundary. */
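   /* Adding (alignment - 1) and then masking with the negated alignment
      rounds the address up to the next multiple of TRAMPOLINE_ALIGNMENT
      bytes; this assumes the alignment is a power of two.  */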
5462 rtx temp = gen_reg_rtx (Pmode);
5463 temp = expand_binop (Pmode, add_optab, tramp,
5464 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5465 temp, 0, OPTAB_LIB_WIDEN);
5466 tramp = expand_binop (Pmode, and_optab, temp,
5467 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5468 temp, 0, OPTAB_LIB_WIDEN);
5469 #endif
5470 return tramp;
5473 /* The functions identify_blocks and reorder_blocks provide a way to
5474 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5475 duplicate portions of the RTL code. Call identify_blocks before
5476 changing the RTL, and call reorder_blocks after. */
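/* As a rough usage sketch (the real call sites live elsewhere in the
   compiler), a pass would bracket its RTL transformation like so:
       identify_blocks (DECL_INITIAL (current_function_decl), get_insns ());
       ... reshuffle or duplicate insns ...
       reorder_blocks (DECL_INITIAL (current_function_decl), get_insns ());  */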
5478 /* Put all this function's BLOCK nodes, including those chained onto
5479    the first block, into a vector in depth-first order.  Then, in each
5480    NOTE for the beginning or end of a block, store (via NOTE_BLOCK)
5481    the BLOCK node that the note opens or closes.
5482 The arguments are BLOCK, the chain of top-level blocks of the function,
5483 and INSNS, the insn chain of the function. */
5485 void
5486 identify_blocks (block, insns)
5487 tree block;
5488 rtx insns;
5490 int n_blocks;
5491 tree *block_vector;
5492 tree *block_stack;
5493 int depth = 0;
5494 int current_block_number = 1;
5495 rtx insn;
5497 if (block == 0)
5498 return;
5500 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5501 depth-first order. */
5502 block_vector = get_block_vector (block, &n_blocks);
5503 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
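   /* BLOCK_STACK tracks the blocks currently open, so that each
      NOTE_INSN_BLOCK_END note can be matched with the BLOCK recorded for
      the corresponding NOTE_INSN_BLOCK_BEG.  */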
5505 for (insn = insns; insn; insn = NEXT_INSN (insn))
5506 if (GET_CODE (insn) == NOTE)
5508 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5510 tree b;
5512 /* If there are more block notes than BLOCKs, something
5513 is badly wrong. */
5514 if (current_block_number == n_blocks)
5515 abort ();
5517 b = block_vector[current_block_number++];
5518 NOTE_BLOCK (insn) = b;
5519 block_stack[depth++] = b;
5521 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5523 if (depth == 0)
5524 /* There are more NOTE_INSN_BLOCK_ENDs than
5525    NOTE_INSN_BLOCK_BEGs.  Something is badly wrong.  */
5526 abort ();
5528 NOTE_BLOCK (insn) = block_stack[--depth];
5532 free (block_vector);
5533 free (block_stack);
5536 /* Given a revised instruction chain, rebuild the tree structure of
5537 BLOCK nodes to correspond to the new order of RTL. The new block
5538    tree is inserted below BLOCK.  Returns the current top-level
5539 block. */
5541 tree
5542 reorder_blocks (block, insns)
5543 tree block;
5544 rtx insns;
5546 tree current_block = block;
5547 rtx insn;
5548 varray_type block_stack;
5550 if (block == NULL_TREE)
5551 return NULL_TREE;
5553 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5555 /* Prune the old trees away, so that they don't get in the way.  */
5556 BLOCK_SUBBLOCKS (current_block) = 0;
5557 BLOCK_CHAIN (current_block) = 0;
5559 for (insn = insns; insn; insn = NEXT_INSN (insn))
5560 if (GET_CODE (insn) == NOTE)
5562 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5564 tree block = NOTE_BLOCK (insn);
5565 /* If we have seen this block before, copy it. */
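      /* A BLOCK may be reached from more than one note once parts of the
         insn chain have been duplicated; TREE_ASM_WRITTEN marks blocks
         already placed in the rebuilt tree, so later occurrences get a
         fresh copy.  */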
5566 if (TREE_ASM_WRITTEN (block))
5568 block = copy_node (block);
5569 NOTE_BLOCK (insn) = block;
5571 BLOCK_SUBBLOCKS (block) = 0;
5572 TREE_ASM_WRITTEN (block) = 1;
5573 BLOCK_SUPERCONTEXT (block) = current_block;
5574 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5575 BLOCK_SUBBLOCKS (current_block) = block;
5576 current_block = block;
5577 VARRAY_PUSH_TREE (block_stack, block);
5579 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5581 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (block_stack);
5582 VARRAY_POP (block_stack);
5583 BLOCK_SUBBLOCKS (current_block)
5584 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5585 current_block = BLOCK_SUPERCONTEXT (current_block);
5589 BLOCK_SUBBLOCKS (current_block)
5590 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5592 VARRAY_FREE (block_stack);
5594 return current_block;
5597 /* Reverse the order of elements in the chain T of blocks,
5598 and return the new head of the chain (old last element). */
5600 static tree
5601 blocks_nreverse (t)
5602 tree t;
5604 register tree prev = 0, decl, next;
5605 for (decl = t; decl; decl = next)
5607 next = BLOCK_CHAIN (decl);
5608 BLOCK_CHAIN (decl) = prev;
5609 prev = decl;
5611 return prev;
5614 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5615 non-NULL, list them all into VECTOR, in a depth-first preorder
5616 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5617 blocks. */
5619 static int
5620 all_blocks (block, vector)
5621 tree block;
5622 tree *vector;
5624 int n_blocks = 0;
5626 while (block)
5628 TREE_ASM_WRITTEN (block) = 0;
5630 /* Record this block. */
5631 if (vector)
5632 vector[n_blocks] = block;
5634 ++n_blocks;
5636 /* Record the subblocks, and their subblocks... */
5637 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5638 vector ? vector + n_blocks : 0);
5639 block = BLOCK_CHAIN (block);
5642 return n_blocks;
5645 /* Return a vector containing all the blocks rooted at BLOCK. The
5646 number of elements in the vector is stored in N_BLOCKS_P. The
5647 vector is dynamically allocated; it is the caller's responsibility
5648 to call `free' on the pointer returned. */
5650 static tree *
5651 get_block_vector (block, n_blocks_p)
5652 tree block;
5653 int *n_blocks_p;
5655 tree *block_vector;
5657 *n_blocks_p = all_blocks (block, NULL);
5658 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
5659 all_blocks (block, block_vector);
5661 return block_vector;
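/* Index to hand out to the next BLOCK that gets numbered; the outermost
   BLOCK of each function is left unnumbered (see number_blocks below).  */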
5664 static int next_block_index = 2;
5666 /* Set BLOCK_NUMBER for all the blocks in FN. */
5668 void
5669 number_blocks (fn)
5670 tree fn;
5672 int i;
5673 int n_blocks;
5674 tree *block_vector;
5676 /* For SDB and XCOFF debugging output, we start numbering the blocks
5677 from 1 within each function, rather than keeping a running
5678 count. */
5679 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
5680 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
5681 next_block_index = 1;
5682 #endif
5684 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
5686 /* The top-level BLOCK isn't numbered at all. */
5687 for (i = 1; i < n_blocks; ++i)
5688 /* We number the blocks from two. */
5689 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
5691 free (block_vector);
5693 return;
5697 /* Allocate a function structure and reset its contents to the defaults. */
5698 static void
5699 prepare_function_start ()
5701 cfun = (struct function *) xcalloc (1, sizeof (struct function));
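   /* xcalloc zeroes the structure, so any field not explicitly set below
      starts out as 0 / NULL.  */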
5703 init_stmt_for_function ();
5704 init_eh_for_function ();
5706 cse_not_expected = ! optimize;
5708 /* Caller save not needed yet. */
5709 caller_save_needed = 0;
5711 /* No stack slots have been made yet. */
5712 stack_slot_list = 0;
5714 current_function_has_nonlocal_label = 0;
5715 current_function_has_nonlocal_goto = 0;
5717 /* There is no stack slot for handling nonlocal gotos. */
5718 nonlocal_goto_handler_slots = 0;
5719 nonlocal_goto_stack_level = 0;
5721 /* No labels have been declared for nonlocal use. */
5722 nonlocal_labels = 0;
5723 nonlocal_goto_handler_labels = 0;
5725 /* No function calls so far in this function. */
5726 function_call_count = 0;
5728 /* No parm regs have been allocated.
5729 (This is important for output_inline_function.) */
5730 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5732 /* Initialize the RTL mechanism. */
5733 init_emit ();
5735 /* Initialize the queue of pending postincrement and postdecrements,
5736 and some other info in expr.c. */
5737 init_expr ();
5739 /* We haven't done register allocation yet. */
5740 reg_renumber = 0;
5742 init_varasm_status (cfun);
5744 /* Clear out data used for inlining. */
5745 cfun->inlinable = 0;
5746 cfun->original_decl_initial = 0;
5747 cfun->original_arg_vector = 0;
5749 cfun->stack_alignment_needed = 0;
5750 #ifdef STACK_BOUNDARY
5751 cfun->preferred_stack_boundary = STACK_BOUNDARY;
5752 #endif
5754 /* Set if a call to setjmp is seen. */
5755 current_function_calls_setjmp = 0;
5757 /* Set if a call to longjmp is seen. */
5758 current_function_calls_longjmp = 0;
5760 current_function_calls_alloca = 0;
5761 current_function_contains_functions = 0;
5762 current_function_is_leaf = 0;
5763 current_function_sp_is_unchanging = 0;
5764 current_function_uses_only_leaf_regs = 0;
5765 current_function_has_computed_jump = 0;
5766 current_function_is_thunk = 0;
5768 current_function_returns_pcc_struct = 0;
5769 current_function_returns_struct = 0;
5770 current_function_epilogue_delay_list = 0;
5771 current_function_uses_const_pool = 0;
5772 current_function_uses_pic_offset_table = 0;
5773 current_function_cannot_inline = 0;
5775 /* We have not yet needed to make a label to jump to for tail-recursion. */
5776 tail_recursion_label = 0;
5778 /* We haven't had a need to make a save area for ap yet. */
5779 arg_pointer_save_area = 0;
5781 /* No stack slots allocated yet. */
5782 frame_offset = 0;
5784 /* No SAVE_EXPRs in this function yet. */
5785 save_expr_regs = 0;
5787 /* No RTL_EXPRs in this function yet. */
5788 rtl_expr_chain = 0;
5790 /* Set up to allocate temporaries. */
5791 init_temp_slots ();
5793 /* Indicate that we need to distinguish between the return value of the
5794 present function and the return value of a function being called. */
5795 rtx_equal_function_value_matters = 1;
5797 /* Indicate that we have not instantiated virtual registers yet. */
5798 virtuals_instantiated = 0;
5800 /* Indicate we have no need of a frame pointer yet. */
5801 frame_pointer_needed = 0;
5803 /* By default assume not varargs or stdarg. */
5804 current_function_varargs = 0;
5805 current_function_stdarg = 0;
5807 /* We haven't made any trampolines for this function yet. */
5808 trampoline_list = 0;
5810 init_pending_stack_adjust ();
5811 inhibit_defer_pop = 0;
5813 current_function_outgoing_args_size = 0;
5815 if (init_lang_status)
5816 (*init_lang_status) (cfun);
5817 if (init_machine_status)
5818 (*init_machine_status) (cfun);
5821 /* Initialize the rtl expansion mechanism so that we can do simple things
5822 like generate sequences. This is used to provide a context during global
5823 initialization of some passes. */
5824 void
5825 init_dummy_function_start ()
5827 prepare_function_start ();
5830 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5831 and initialize static variables for generating RTL for the statements
5832 of the function. */
5834 void
5835 init_function_start (subr, filename, line)
5836 tree subr;
5837 char *filename;
5838 int line;
5840 prepare_function_start ();
5842 /* Remember this function for later. */
5843 cfun->next_global = all_functions;
5844 all_functions = cfun;
5846 current_function_name = (*decl_printable_name) (subr, 2);
5847 cfun->decl = subr;
5849 /* Nonzero if this is a nested function that uses a static chain. */
5851 current_function_needs_context
5852 = (decl_function_context (current_function_decl) != 0
5853 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5855 /* Within function body, compute a type's size as soon as it is laid out.  */
5856 immediate_size_expand++;
5858 /* Prevent ever trying to delete the first instruction of a function.
5859 Also tell final how to output a linenum before the function prologue.
5860 Note linenums could be missing, e.g. when compiling a Java .class file. */
5861 if (line > 0)
5862 emit_line_note (filename, line);
5864 /* Make sure first insn is a note even if we don't want linenums.
5865 This makes sure the first insn will never be deleted.
5866 Also, final expects a note to appear there. */
5867 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5869 /* Set flags used by final.c. */
5870 if (aggregate_value_p (DECL_RESULT (subr)))
5872 #ifdef PCC_STATIC_STRUCT_RETURN
5873 current_function_returns_pcc_struct = 1;
5874 #endif
5875 current_function_returns_struct = 1;
5878 /* Warn if this value is an aggregate type,
5879 regardless of which calling convention we are using for it. */
5880 if (warn_aggregate_return
5881 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5882 warning ("function returns an aggregate");
5884 current_function_returns_pointer
5885 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5888 /* Make sure all values used by the optimization passes have sane
5889 defaults. */
5890 void
5891 init_function_for_compilation ()
5893 reg_renumber = 0;
5894 /* No prologue/epilogue insns yet. */
5895 prologue = epilogue = 0;
5898 /* Indicate that the current function uses extra args
5899 not explicitly mentioned in the argument list in any fashion. */
5901 void
5902 mark_varargs ()
5904 current_function_varargs = 1;
5907 /* Expand a call to __main at the beginning of a possible main function. */
5909 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5910 #undef HAS_INIT_SECTION
5911 #define HAS_INIT_SECTION
5912 #endif
5914 void
5915 expand_main_function ()
5917 #if !defined (HAS_INIT_SECTION)
5918 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5919 VOIDmode, 0);
5920 #endif /* not HAS_INIT_SECTION */
5923 extern struct obstack permanent_obstack;
5925 /* Start the RTL for a new function, and set variables used for
5926 emitting RTL.
5927 SUBR is the FUNCTION_DECL node.
5928 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5929 the function's parameters, which must be run at any return statement. */
5931 void
5932 expand_function_start (subr, parms_have_cleanups)
5933 tree subr;
5934 int parms_have_cleanups;
5936 tree tem;
5937 rtx last_ptr = NULL_RTX;
5939 /* Make sure volatile mem refs aren't considered
5940 valid operands of arithmetic insns. */
5941 init_recog_no_volatile ();
5943 /* Set this before generating any memory accesses. */
5944 current_function_check_memory_usage
5945 = (flag_check_memory_usage
5946 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5948 current_function_instrument_entry_exit
5949 = (flag_instrument_function_entry_exit
5950 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5952 current_function_limit_stack
5953 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5955 /* If function gets a static chain arg, store it in the stack frame.
5956 Do this first, so it gets the first stack slot offset. */
5957 if (current_function_needs_context)
5959 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5961 /* Delay copying static chain if it is not a register to avoid
5962 conflicts with regs used for parameters. */
5963 if (! SMALL_REGISTER_CLASSES
5964 || GET_CODE (static_chain_incoming_rtx) == REG)
5965 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5968 /* If the parameters of this function need cleaning up, get a label
5969 for the beginning of the code which executes those cleanups. This must
5970 be done before doing anything with return_label. */
5971 if (parms_have_cleanups)
5972 cleanup_label = gen_label_rtx ();
5973 else
5974 cleanup_label = 0;
5976 /* Make the label for return statements to jump to, if this machine
5977 does not have a one-instruction return and uses an epilogue,
5978 or if it returns a structure, or if it has parm cleanups. */
5979 #ifdef HAVE_return
5980 if (cleanup_label == 0 && HAVE_return
5981 && ! current_function_instrument_entry_exit
5982 && ! current_function_returns_pcc_struct
5983 && ! (current_function_returns_struct && ! optimize))
5984 return_label = 0;
5985 else
5986 return_label = gen_label_rtx ();
5987 #else
5988 return_label = gen_label_rtx ();
5989 #endif
5991 /* Initialize rtx used to return the value. */
5992 /* Do this before assign_parms so that we copy the struct value address
5993 before any library calls that assign parms might generate. */
5995 /* Decide whether to return the value in memory or in a register. */
5996 if (aggregate_value_p (DECL_RESULT (subr)))
5998 /* Returning something that won't go in a register. */
5999 register rtx value_address = 0;
6001 #ifdef PCC_STATIC_STRUCT_RETURN
6002 if (current_function_returns_pcc_struct)
6004 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6005 value_address = assemble_static_space (size);
6007 else
6008 #endif
6010 /* Expect to be passed the address of a place to store the value.
6011 If it is passed as an argument, assign_parms will take care of
6012 it. */
6013 if (struct_value_incoming_rtx)
6015 value_address = gen_reg_rtx (Pmode);
6016 emit_move_insn (value_address, struct_value_incoming_rtx);
6019 if (value_address)
6021 DECL_RTL (DECL_RESULT (subr))
6022 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6023 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
6024 AGGREGATE_TYPE_P (TREE_TYPE
6025 (DECL_RESULT
6026 (subr))));
6029 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6030 /* If return mode is void, this decl rtl should not be used. */
6031 DECL_RTL (DECL_RESULT (subr)) = 0;
6032 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6034 /* If function will end with cleanup code for parms,
6035 compute the return values into a pseudo reg,
6036 which we will copy into the true return register
6037 after the cleanups are done. */
6039 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6041 #ifdef PROMOTE_FUNCTION_RETURN
6042 tree type = TREE_TYPE (DECL_RESULT (subr));
6043 int unsignedp = TREE_UNSIGNED (type);
6045 mode = promote_mode (type, mode, &unsignedp, 1);
6046 #endif
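      /* The pseudo holding the return value is created in MODE, which may
         have been widened just above when PROMOTE_FUNCTION_RETURN is
         defined, so that it matches the mode in which the target actually
         returns the value.  */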
6048 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6050 else
6051 /* Scalar, returned in a register. */
6053 #ifdef FUNCTION_OUTGOING_VALUE
6054 DECL_RTL (DECL_RESULT (subr))
6055 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6056 #else
6057 DECL_RTL (DECL_RESULT (subr))
6058 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6059 #endif
6061 /* Mark this reg as the function's return value. */
6062 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6064 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6065 /* Needed because we may need to move this to memory
6066 in case it's a named return value whose address is taken. */
6067 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6071 /* Initialize rtx for parameters and local variables.
6072 In some cases this requires emitting insns. */
6074 assign_parms (subr);
6076 /* Copy the static chain now if it wasn't a register. The delay is to
6077 avoid conflicts with the parameter passing registers. */
6079 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6080 if (GET_CODE (static_chain_incoming_rtx) != REG)
6081 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6083 /* The following was moved from init_function_start.
6084 The move is supposed to make sdb output more accurate. */
6085 /* Indicate the beginning of the function body,
6086 as opposed to parm setup. */
6087 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6089 if (GET_CODE (get_last_insn ()) != NOTE)
6090 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6091 parm_birth_insn = get_last_insn ();
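   /* PARM_BIRTH_INSN marks the end of parameter setup; expand_function_end
      uses it as an anchor for code that must follow that setup (e.g. the
      stack save emitted when alloca is used).  */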
6093 context_display = 0;
6094 if (current_function_needs_context)
6096 /* Fetch static chain values for containing functions. */
6097 tem = decl_function_context (current_function_decl);
6098 /* Copy the static chain pointer into a pseudo. If we have
6099 small register classes, copy the value from memory if
6100 static_chain_incoming_rtx is a REG. */
6101 if (tem)
6103 /* If the static chain originally came in a register, put it back
6104 there, then move it out in the next insn. The reason for
6105 this peculiar code is to satisfy function integration. */
6106 if (SMALL_REGISTER_CLASSES
6107 && GET_CODE (static_chain_incoming_rtx) == REG)
6108 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6109 last_ptr = copy_to_reg (static_chain_incoming_rtx);
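      /* Walk outward through the lexically enclosing functions, recording
         in CONTEXT_DISPLAY the rtx that yields each one's frame; this is
         the list that fix_lexical_addr and lookup_static_chain consult.  */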
6112 while (tem)
6114 tree rtlexp = make_node (RTL_EXPR);
6116 RTL_EXPR_RTL (rtlexp) = last_ptr;
6117 context_display = tree_cons (tem, rtlexp, context_display);
6118 tem = decl_function_context (tem);
6119 if (tem == 0)
6120 break;
6121 /* Chain thru stack frames, assuming pointer to next lexical frame
6122 is found at the place we always store it. */
6123 #ifdef FRAME_GROWS_DOWNWARD
6124 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6125 #endif
6126 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6127 memory_address (Pmode,
6128 last_ptr)));
6130 /* If we are not optimizing, ensure that we know that this
6131 piece of context is live over the entire function. */
6132 if (! optimize)
6133 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6134 save_expr_regs);
6138 if (current_function_instrument_entry_exit)
6140 rtx fun = DECL_RTL (current_function_decl);
6141 if (GET_CODE (fun) == MEM)
6142 fun = XEXP (fun, 0);
6143 else
6144 abort ();
6145 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6146 fun, Pmode,
6147 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6149 hard_frame_pointer_rtx),
6150 Pmode);
6153 /* After the display initializations is where the tail-recursion label
6154 should go, if we end up needing one. Ensure we have a NOTE here
6155 since some things (like trampolines) get placed before this. */
6156 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6158 /* Evaluate now the sizes of any types declared among the arguments. */
6159 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6161 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6162 EXPAND_MEMORY_USE_BAD);
6163 /* Flush the queue in case this parameter declaration has
6164 side-effects. */
6165 emit_queue ();
6168 /* Make sure there is a line number after the function entry setup code. */
6169 force_next_line_note ();
6172 /* Undo the effects of init_dummy_function_start. */
6173 void
6174 expand_dummy_function_end ()
6176 /* End any sequences that failed to be closed due to syntax errors. */
6177 while (in_sequence_p ())
6178 end_sequence ();
6180 /* Outside function body, can't compute type's actual size
6181 until next function's body starts. */
6183 free_after_parsing (cfun);
6184 free_after_compilation (cfun);
6185 free (cfun);
6186 cfun = 0;
6189 /* Call DOIT for each hard register used as a return value from
6190 the current function. */
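/* As an illustration, clobber_return_register and use_return_register
   below call this as
       diddle_return_value (do_clobber_return_reg, NULL);
       diddle_return_value (do_use_return_reg, NULL);
   emitting a CLOBBER or USE of each hard register holding the value.  */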
6192 void
6193 diddle_return_value (doit, arg)
6194 void (*doit) PARAMS ((rtx, void *));
6195 void *arg;
6197 rtx outgoing = current_function_return_rtx;
6199 if (! outgoing)
6200 return;
6202 if (GET_CODE (outgoing) == REG
6203 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6205 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6206 #ifdef FUNCTION_OUTGOING_VALUE
6207 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6208 #else
6209 outgoing = FUNCTION_VALUE (type, current_function_decl);
6210 #endif
6211 /* If this is a BLKmode structure being returned in registers, then use
6212 the mode computed in expand_return. */
6213 if (GET_MODE (outgoing) == BLKmode)
6214 PUT_MODE (outgoing,
6215 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6218 if (GET_CODE (outgoing) == REG)
6219 (*doit) (outgoing, arg);
6220 else if (GET_CODE (outgoing) == PARALLEL)
6222 int i;
6224 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6226 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6228 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6229 (*doit) (x, arg);
6234 static void
6235 do_clobber_return_reg (reg, arg)
6236 rtx reg;
6237 void *arg ATTRIBUTE_UNUSED;
6239 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6242 void
6243 clobber_return_register ()
6245 diddle_return_value (do_clobber_return_reg, NULL);
6248 static void
6249 do_use_return_reg (reg, arg)
6250 rtx reg;
6251 void *arg ATTRIBUTE_UNUSED;
6253 emit_insn (gen_rtx_USE (VOIDmode, reg));
6256 void
6257 use_return_register ()
6259 diddle_return_value (do_use_return_reg, NULL);
6262 /* Generate RTL for the end of the current function.
6263 FILENAME and LINE are the current position in the source file.
6265 It is up to language-specific callers to do cleanups for parameters--
6266 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6268 void
6269 expand_function_end (filename, line, end_bindings)
6270 char *filename;
6271 int line;
6272 int end_bindings;
6274 tree link;
6276 #ifdef TRAMPOLINE_TEMPLATE
6277 static rtx initial_trampoline;
6278 #endif
6280 finish_expr_for_function ();
6282 #ifdef NON_SAVING_SETJMP
6283 /* Don't put any variables in registers if we call setjmp
6284 on a machine that fails to restore the registers. */
6285 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6287 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6288 setjmp_protect (DECL_INITIAL (current_function_decl));
6290 setjmp_protect_args ();
6292 #endif
6294 /* Save the argument pointer if a save area was made for it. */
6295 if (arg_pointer_save_area)
6297 /* arg_pointer_save_area may not be a valid memory address, so we
6298 have to check it and fix it if necessary. */
6299 rtx seq;
6300 start_sequence ();
6301 emit_move_insn (validize_mem (arg_pointer_save_area),
6302 virtual_incoming_args_rtx);
6303 seq = gen_sequence ();
6304 end_sequence ();
6305 emit_insn_before (seq, tail_recursion_reentry);
6308 /* Initialize any trampolines required by this function. */
6309 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6311 tree function = TREE_PURPOSE (link);
6312 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6313 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6314 #ifdef TRAMPOLINE_TEMPLATE
6315 rtx blktramp;
6316 #endif
6317 rtx seq;
6319 #ifdef TRAMPOLINE_TEMPLATE
6320 /* First make sure this compilation has a template for
6321 initializing trampolines. */
6322 if (initial_trampoline == 0)
6324 end_temporary_allocation ();
6325 initial_trampoline
6326 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6327 resume_temporary_allocation ();
6329 ggc_add_rtx_root (&initial_trampoline, 1);
6331 #endif
6333 /* Generate insns to initialize the trampoline. */
6334 start_sequence ();
6335 tramp = round_trampoline_addr (XEXP (tramp, 0));
6336 #ifdef TRAMPOLINE_TEMPLATE
6337 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6338 emit_block_move (blktramp, initial_trampoline,
6339 GEN_INT (TRAMPOLINE_SIZE),
6340 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6341 #endif
6342 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6343 seq = get_insns ();
6344 end_sequence ();
6346 /* Put those insns at entry to the containing function (this one). */
6347 emit_insns_before (seq, tail_recursion_reentry);
6350 /* If we are doing stack checking and this function makes calls,
6351 do a stack probe at the start of the function to ensure we have enough
6352 space for another stack frame. */
6353 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6355 rtx insn, seq;
6357 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6358 if (GET_CODE (insn) == CALL_INSN)
6360 start_sequence ();
6361 probe_stack_range (STACK_CHECK_PROTECT,
6362 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6363 seq = get_insns ();
6364 end_sequence ();
6365 emit_insns_before (seq, tail_recursion_reentry);
6366 break;
6370 /* Warn about unused parms if extra warnings were specified. */
6371 if (warn_unused && extra_warnings)
6373 tree decl;
6375 for (decl = DECL_ARGUMENTS (current_function_decl);
6376 decl; decl = TREE_CHAIN (decl))
6377 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6378 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6379 warning_with_decl (decl, "unused parameter `%s'");
6382 /* Delete handlers for nonlocal gotos if nothing uses them. */
6383 if (nonlocal_goto_handler_slots != 0
6384 && ! current_function_has_nonlocal_label)
6385 delete_handlers ();
6387 /* End any sequences that failed to be closed due to syntax errors. */
6388 while (in_sequence_p ())
6389 end_sequence ();
6391 /* Outside function body, can't compute type's actual size
6392 until next function's body starts. */
6393 immediate_size_expand--;
6395 clear_pending_stack_adjust ();
6396 do_pending_stack_adjust ();
6398 /* Mark the end of the function body.
6399 If control reaches this insn, the function can drop through
6400 without returning a value. */
6401 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6403 /* Must mark the last line number note in the function, so that the test
6404 coverage code can avoid counting the last line twice. This just tells
6405 the code to ignore the immediately following line note, since there
6406 already exists a copy of this note somewhere above. This line number
6407 note is still needed for debugging though, so we can't delete it. */
6408 if (flag_test_coverage)
6409 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6411 /* Output a linenumber for the end of the function.
6412 SDB depends on this. */
6413 emit_line_note_force (filename, line);
6415 /* Output the label for the actual return from the function,
6416 if one is expected. This happens either because a function epilogue
6417 is used instead of a return instruction, or because a return was done
6418 with a goto in order to run local cleanups, or because of pcc-style
6419 structure returning. */
6421 if (return_label)
6423 /* Before the return label, clobber the return registers so that
6424    they are not propagated live to the rest of the function.  This
6425 can only happen with functions that drop through; if there had
6426 been a return statement, there would have either been a return
6427 rtx, or a jump to the return label. */
6428 clobber_return_register ();
6430 emit_label (return_label);
6433 /* C++ uses this. */
6434 if (end_bindings)
6435 expand_end_bindings (0, 0, 0);
6437 /* Now handle any leftover exception regions that may have been
6438 created for the parameters. */
6440 rtx last = get_last_insn ();
6441 rtx label;
6443 expand_leftover_cleanups ();
6445 /* If there are any catch_clauses remaining, output them now. */
6446 emit_insns (catch_clauses);
6447 catch_clauses = NULL_RTX;
6448 /* If the above emitted any code, make sure we jump around it.  */
6449 if (last != get_last_insn ())
6451 label = gen_label_rtx ();
6452 last = emit_jump_insn_after (gen_jump (label), last);
6453 last = emit_barrier_after (last);
6454 emit_label (label);
6458 if (current_function_instrument_entry_exit)
6460 rtx fun = DECL_RTL (current_function_decl);
6461 if (GET_CODE (fun) == MEM)
6462 fun = XEXP (fun, 0);
6463 else
6464 abort ();
6465 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6466 fun, Pmode,
6467 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6469 hard_frame_pointer_rtx),
6470 Pmode);
6473 /* If we had calls to alloca, and this machine needs
6474 an accurate stack pointer to exit the function,
6475 insert some code to save and restore the stack pointer. */
6476 #ifdef EXIT_IGNORE_STACK
6477 if (! EXIT_IGNORE_STACK)
6478 #endif
6479 if (current_function_calls_alloca)
6481 rtx tem = 0;
6483 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6484 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6487 /* If scalar return value was computed in a pseudo-reg,
6488 copy that to the hard return register. */
6489 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6490 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6491 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6492 >= FIRST_PSEUDO_REGISTER))
6494 rtx real_decl_result;
6496 #ifdef FUNCTION_OUTGOING_VALUE
6497 real_decl_result
6498 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6499 current_function_decl);
6500 #else
6501 real_decl_result
6502 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6503 current_function_decl);
6504 #endif
6505 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6506 /* If this is a BLKmode structure being returned in registers, then use
6507 the mode computed in expand_return. */
6508 if (GET_MODE (real_decl_result) == BLKmode)
6509 PUT_MODE (real_decl_result,
6510 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6511 emit_move_insn (real_decl_result,
6512 DECL_RTL (DECL_RESULT (current_function_decl)));
6514 /* The delay slot scheduler assumes that current_function_return_rtx
6515 holds the hard register containing the return value, not a temporary
6516 pseudo. */
6517 current_function_return_rtx = real_decl_result;
6520 /* If returning a structure, arrange to return the address of the value
6521 in a place where debuggers expect to find it.
6523 If returning a structure PCC style,
6524 the caller also depends on this value.
6525 And current_function_returns_pcc_struct is not necessarily set. */
6526 if (current_function_returns_struct
6527 || current_function_returns_pcc_struct)
6529 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6530 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6531 #ifdef FUNCTION_OUTGOING_VALUE
6532 rtx outgoing
6533 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6534 current_function_decl);
6535 #else
6536 rtx outgoing
6537 = FUNCTION_VALUE (build_pointer_type (type),
6538 current_function_decl);
6539 #endif
6541 /* Mark this as a function return value so integrate will delete the
6542 assignment and USE below when inlining this function. */
6543 REG_FUNCTION_VALUE_P (outgoing) = 1;
6545 emit_move_insn (outgoing, value_address);
6548 /* ??? This should no longer be necessary since the stupid register
6549    allocator is no longer with us, but some parts of the compiler
6550    (e.g. reload_combine, and the sh mach_dep_reorg) still try to compute
6551    their own lifetime info instead of using the general framework.  */
6552 use_return_register ();
6554 /* If this is an implementation of __throw, do what's necessary to
6555 communicate between __builtin_eh_return and the epilogue. */
6556 expand_eh_return ();
6558 /* Output a return insn if we are using one.
6559 Otherwise, let the rtl chain end here, to drop through
6560 into the epilogue. */
6562 #ifdef HAVE_return
6563 if (HAVE_return)
6565 emit_jump_insn (gen_return ());
6566 emit_barrier ();
6568 #endif
6570 /* Fix up any gotos that jumped out to the outermost
6571 binding level of the function.
6572 Must follow emitting RETURN_LABEL. */
6574 /* If you have any cleanups to do at this point,
6575 and they need to create temporary variables,
6576 then you will lose. */
6577 expand_fixups (get_insns ());
6580 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6581 or a single insn). */
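/* The UID vector is terminated by a zero entry; `contains' below relies on
   that sentinel when scanning it.  */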
6583 static int *
6584 record_insns (insns)
6585 rtx insns;
6587 int *vec;
6589 if (GET_CODE (insns) == SEQUENCE)
6591 int len = XVECLEN (insns, 0);
6592 vec = (int *) oballoc ((len + 1) * sizeof (int));
6593 vec[len] = 0;
6594 while (--len >= 0)
6595 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6597 else
6599 vec = (int *) oballoc (2 * sizeof (int));
6600 vec[0] = INSN_UID (insns);
6601 vec[1] = 0;
6603 return vec;
6606 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6608 static int
6609 contains (insn, vec)
6610 rtx insn;
6611 int *vec;
6613 register int i, j;
6615 if (GET_CODE (insn) == INSN
6616 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6618 int count = 0;
6619 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6620 for (j = 0; vec[j]; j++)
6621 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6622 count++;
6623 return count;
6625 else
6627 for (j = 0; vec[j]; j++)
6628 if (INSN_UID (insn) == vec[j])
6629 return 1;
6631 return 0;
int
6635 prologue_epilogue_contains (insn)
6636 rtx insn;
6638 if (prologue && contains (insn, prologue))
6639 return 1;
6640 if (epilogue && contains (insn, epilogue))
6641 return 1;
6642 return 0;
6645 #ifdef HAVE_return
6646 /* Insert gen_return at the end of block BB. This also means updating
6647 block_for_insn appropriately. */
6649 static void
6650 emit_return_into_block (bb)
6651 basic_block bb;
6653 rtx p, end;
6655 end = emit_jump_insn_after (gen_return (), bb->end);
6656 p = NEXT_INSN (bb->end);
6657 while (1)
6659 set_block_for_insn (p, bb);
6660 if (p == end)
6661 break;
6662 p = NEXT_INSN (p);
6664 bb->end = end;
6666 #endif /* HAVE_return */
6668 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6669 this into place with notes indicating where the prologue ends and where
6670 the epilogue begins. Update the basic block information when possible. */
6672 void
6673 thread_prologue_and_epilogue_insns (f)
6674 rtx f ATTRIBUTE_UNUSED;
6676 int inserted = 0;
6677 edge e;
6678 rtx seq;
6680 #ifdef HAVE_prologue
6681 if (HAVE_prologue)
6683 rtx insn;
6685 start_sequence ();
6686 seq = gen_prologue();
6687 emit_insn (seq);
6689 /* Retain a map of the prologue insns. */
6690 if (GET_CODE (seq) != SEQUENCE)
6691 seq = get_insns ();
6692 prologue = record_insns (seq);
6693 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6695 /* GDB handles `break f' by setting a breakpoint on the first
6696 line note *after* the prologue. That means that we should
6697 insert a line note here; otherwise, if the next line note
6698 comes part way into the next block, GDB will skip all the way
6699 to that point. */
6700 insn = next_nonnote_insn (f);
6701 while (insn)
6703 if (GET_CODE (insn) == NOTE
6704 && NOTE_LINE_NUMBER (insn) >= 0)
6706 emit_line_note_force (NOTE_SOURCE_FILE (insn),
6707 NOTE_LINE_NUMBER (insn));
6708 break;
6711 insn = PREV_INSN (insn);
6714 seq = gen_sequence ();
6715 end_sequence ();
6717 /* If optimization is off, and perhaps for an empty function,
6718    the entry block will have no successors.  */
6719 if (ENTRY_BLOCK_PTR->succ)
6721 /* Can't deal with multiple successors of the entry block.  */
6722 if (ENTRY_BLOCK_PTR->succ->succ_next)
6723 abort ();
6725 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6726 inserted = 1;
6728 else
6729 emit_insn_after (seq, f);
6731 #endif
6733 /* If the exit block has no non-fake predecessors, we don't need
6734 an epilogue. */
6735 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6736 if ((e->flags & EDGE_FAKE) == 0)
6737 break;
6738 if (e == NULL)
6739 goto epilogue_done;
6741 #ifdef HAVE_return
6742 if (optimize && HAVE_return)
6744 /* If we're allowed to generate a simple return instruction,
6745 then by definition we don't need a full epilogue. Examine
6746 the block that falls through to EXIT. If it does not
6747 contain any code, examine its predecessors and try to
6748 emit (conditional) return instructions. */
6750 basic_block last;
6751 edge e_next;
6752 rtx label;
6754 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6755 if (e->flags & EDGE_FALLTHRU)
6756 break;
6757 if (e == NULL)
6758 goto epilogue_done;
6759 last = e->src;
6761 /* Verify that there are no active instructions in the last block. */
6762 label = last->end;
6763 while (label && GET_CODE (label) != CODE_LABEL)
6765 if (active_insn_p (label))
6766 break;
6767 label = PREV_INSN (label);
6770 if (last->head == label && GET_CODE (label) == CODE_LABEL)
6772 for (e = last->pred; e ; e = e_next)
6774 basic_block bb = e->src;
6775 rtx jump;
6777 e_next = e->pred_next;
6778 if (bb == ENTRY_BLOCK_PTR)
6779 continue;
6781 jump = bb->end;
6782 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
6783 continue;
6785 /* If we have an unconditional jump, we can replace that
6786 with a simple return instruction. */
6787 if (simplejump_p (jump))
6789 emit_return_into_block (bb);
6790 flow_delete_insn (jump);
6793 /* If we have a conditional jump, we can try to replace
6794 that with a conditional return instruction. */
6795 else if (condjump_p (jump))
6797 rtx ret, *loc;
6799 ret = SET_SRC (PATTERN (jump));
6800 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
6801 loc = &XEXP (ret, 1);
6802 else
6803 loc = &XEXP (ret, 2);
6804 ret = gen_rtx_RETURN (VOIDmode);
6806 if (! validate_change (jump, loc, ret, 0))
6807 continue;
6808 if (JUMP_LABEL (jump))
6809 LABEL_NUSES (JUMP_LABEL (jump))--;
6811 /* If this block has only one successor, then the jump and the
6812    fallthru path both lead to the same (fallthru) block, so we
6813    cannot delete that edge.  */
6814 if (bb->succ->succ_next == NULL)
6815 continue;
6817 else
6818 continue;
6820 /* Fix up the CFG for the successful change we just made. */
6821 remove_edge (e);
6822 make_edge (NULL, bb, EXIT_BLOCK_PTR, 0);
6825 /* Emit a return insn for the exit fallthru block. Whether
6826 this is still reachable will be determined later. */
6828 emit_barrier_after (last->end);
6829 emit_return_into_block (last);
6831 else
6833 /* The exit fallthru block wasn't empty.  We have to use
6834    insert_insn_on_edge, since the block falling through to EXIT
6835    may also branch elsewhere, not just exit.  */
6836 start_sequence ();
6837 emit_jump_insn (gen_return ());
6838 seq = gen_sequence ();
6839 end_sequence ();
6840 insert_insn_on_edge (seq, e);
6841 inserted = 1;
6843 goto epilogue_done;
6845 #endif
6846 #ifdef HAVE_epilogue
6847 if (HAVE_epilogue)
6849 /* Find the edge that falls through to EXIT. Other edges may exist
6850 due to RETURN instructions, but those don't need epilogues.
6851 There really shouldn't be a mixture -- either all should have
6852 been converted or none, however... */
6854 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6855 if (e->flags & EDGE_FALLTHRU)
6856 break;
6857 if (e == NULL)
6858 goto epilogue_done;
6860 start_sequence ();
6861 emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
6863 seq = gen_epilogue ();
6864 emit_jump_insn (seq);
6866 /* Retain a map of the epilogue insns. */
6867 if (GET_CODE (seq) != SEQUENCE)
6868 seq = get_insns ();
6869 epilogue = record_insns (seq);
6871 seq = gen_sequence ();
6872 end_sequence();
6874 insert_insn_on_edge (seq, e);
6875 inserted = 1;
6877 #endif
6878 epilogue_done:
6880 if (inserted)
6881 commit_edge_insertions ();
6884 /* Reposition the prologue-end and epilogue-begin notes after instruction
6885 scheduling and delayed branch scheduling. */
6887 void
6888 reposition_prologue_and_epilogue_notes (f)
6889 rtx f ATTRIBUTE_UNUSED;
6891 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6892 /* Reposition the prologue and epilogue notes. */
6893 if (n_basic_blocks)
6895 int len;
6897 if (prologue)
6899 register rtx insn, note = 0;
6901 /* Scan from the beginning until we reach the last prologue insn.
6902 We apparently can't depend on basic_block_{head,end} after
6903 reorg has run. */
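      /* First count how many prologue insn UIDs were recorded; the loop
         below just scans PROLOGUE to its zero terminator.  */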
6904 for (len = 0; prologue[len]; len++)
6906 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6908 if (GET_CODE (insn) == NOTE)
6910 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6911 note = insn;
6913 else if ((len -= contains (insn, prologue)) == 0)
6915 rtx next;
6916 /* Find the prologue-end note if we haven't already, and
6917 move it to just after the last prologue insn. */
6918 if (note == 0)
6920 for (note = insn; (note = NEXT_INSN (note));)
6921 if (GET_CODE (note) == NOTE
6922 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6923 break;
6926 next = NEXT_INSN (note);
6928 /* Whether or not we can depend on BLOCK_HEAD,
6929 attempt to keep it up-to-date. */
6930 if (BLOCK_HEAD (0) == note)
6931 BLOCK_HEAD (0) = next;
6933 remove_insn (note);
6934 add_insn_after (note, insn);
6939 if (epilogue)
6941 register rtx insn, note = 0;
6943 /* Scan from the end until we reach the first epilogue insn.
6944 We apparently can't depend on basic_block_{head,end} after
6945 reorg has run. */
6946 for (len = 0; epilogue[len]; len++)
6948 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6950 if (GET_CODE (insn) == NOTE)
6952 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6953 note = insn;
6955 else if ((len -= contains (insn, epilogue)) == 0)
6957 /* Find the epilogue-begin note if we haven't already, and
6958 move it to just before the first epilogue insn. */
6959 if (note == 0)
6961 for (note = insn; (note = PREV_INSN (note));)
6962 if (GET_CODE (note) == NOTE
6963 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6964 break;
6967 /* Whether or not we can depend on BLOCK_HEAD,
6968 attempt to keep it up-to-date. */
6969 if (n_basic_blocks
6970 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6971 BLOCK_HEAD (n_basic_blocks-1) = note;
6973 remove_insn (note);
6974 add_insn_before (note, insn);
6979 #endif /* HAVE_prologue or HAVE_epilogue */
6982 /* Mark T for GC. */
6984 static void
6985 mark_temp_slot (t)
6986 struct temp_slot *t;
6988 while (t)
6990 ggc_mark_rtx (t->slot);
6991 ggc_mark_rtx (t->address);
6992 ggc_mark_tree (t->rtl_expr);
6994 t = t->next;
6998 /* Mark P for GC. */
7000 static void
7001 mark_function_status (p)
7002 struct function *p;
7004 int i;
7005 rtx *r;
7007 if (p == 0)
7008 return;
7010 ggc_mark_rtx (p->arg_offset_rtx);
7012 if (p->x_parm_reg_stack_loc)
7013 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7014 i > 0; --i, ++r)
7015 ggc_mark_rtx (*r);
7017 ggc_mark_rtx (p->return_rtx);
7018 ggc_mark_rtx (p->x_cleanup_label);
7019 ggc_mark_rtx (p->x_return_label);
7020 ggc_mark_rtx (p->x_save_expr_regs);
7021 ggc_mark_rtx (p->x_stack_slot_list);
7022 ggc_mark_rtx (p->x_parm_birth_insn);
7023 ggc_mark_rtx (p->x_tail_recursion_label);
7024 ggc_mark_rtx (p->x_tail_recursion_reentry);
7025 ggc_mark_rtx (p->internal_arg_pointer);
7026 ggc_mark_rtx (p->x_arg_pointer_save_area);
7027 ggc_mark_tree (p->x_rtl_expr_chain);
7028 ggc_mark_rtx (p->x_last_parm_insn);
7029 ggc_mark_tree (p->x_context_display);
7030 ggc_mark_tree (p->x_trampoline_list);
7031 ggc_mark_rtx (p->epilogue_delay_list);
7033 mark_temp_slot (p->x_temp_slots);
7036 struct var_refs_queue *q = p->fixup_var_refs_queue;
7037 while (q)
7039 ggc_mark_rtx (q->modified);
7040 q = q->next;
7044 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7045 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7046 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7047 ggc_mark_tree (p->x_nonlocal_labels);
7050 /* Mark the function chain ARG (which is really a struct function **)
7051 for GC. */
7053 static void
7054 mark_function_chain (arg)
7055 void *arg;
7057 struct function *f = *(struct function **) arg;
7059 for (; f; f = f->next_global)
7061 ggc_mark_tree (f->decl);
7063 mark_function_status (f);
7064 mark_eh_status (f->eh);
7065 mark_stmt_status (f->stmt);
7066 mark_expr_status (f->expr);
7067 mark_emit_status (f->emit);
7068 mark_varasm_status (f->varasm);
7070 if (mark_machine_status)
7071 (*mark_machine_status) (f);
7072 if (mark_lang_status)
7073 (*mark_lang_status) (f);
7075 if (f->original_arg_vector)
7076 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7077 if (f->original_decl_initial)
7078 ggc_mark_tree (f->original_decl_initial);
7082 /* Called once, at initialization, to initialize function.c. */
7084 void
7085 init_function_once ()
7087 ggc_add_root (&all_functions, 1, sizeof all_functions,
7088 mark_function_chain);