/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
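
/* A sketch of the driving sequence, with illustrative arguments only:

       expand_function_start (fndecl, 0);
       ... expand each statement of the body ...
       expand_function_end (input_filename, lineno, 0);

   while the body is being expanded, `assign_stack_local' and
   `put_var_into_stack' are called as described above.  */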
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
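
/* For instance, with ALIGN == 8 (a power of two):
   FLOOR_ROUND (13, 8) == (13 & ~7) == 8, and
   CEIL_ROUND (13, 8) == ((13 + 7) & ~7) == 16.
   The bit masking also behaves for negative values, where division
   would not: FLOOR_ROUND (-13, 8) == -16.  */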
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PROTO((struct function *));
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));
void (*mark_machine_status) PROTO((struct function *));
void (*free_machine_status) PROTO((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PROTO((struct function *));
void (*save_lang_status) PROTO((struct function *));
void (*restore_lang_status) PROTO((struct function *));
void (*mark_lang_status) PROTO((struct function *));
void (*free_lang_status) PROTO((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *current_function = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
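
/* For instance, in

       x = ({ struct S s = f (); g (&s); s; });

   the temporaries made while expanding the grouping live at an inner
   level; preserve_temp_slots moves the one holding the result (or, if
   it cannot tell which that is, all of them) up a level so that
   free_temp_slots does not reclaim it with the rest.  (Illustrative
   example; see preserve_temp_slots below.)  */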
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx to be used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inline function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
                                        int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
                                              int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int,
                                      struct hash_table *));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
                                  struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int, struct hash_table *));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int, struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static int *record_insns PROTO((rtx)) ATTRIBUTE_UNUSED;
static int contains PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
                                     struct hash_table *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
                                                       struct hash_table *,
                                                       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PROTO ((struct temp_slot *));
static void mark_function_status PROTO ((struct function *));
static void mark_function_chain PROTO ((void *));
static void prepare_function_start PROTO ((void));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
                      ? current_function
                      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (current_function == 0)
    init_dummy_function_start ();
  p = current_function;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  current_function = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  current_function = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
                      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (current_function);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  if (function != current_function)
    push_obstacks (function->function_obstack,
                   function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
         stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
        alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == current_function && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (function != current_function)
    pop_obstacks ();

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, current_function);
}
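
/* For example (illustrative only), a caller wanting a word-sized slot
   aligned according to its mode can do

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   while passing ALIGN of -1 requests BIGGEST_ALIGNMENT with the size
   rounded up to a multiple of it, and a positive ALIGN gives the
   boundary in bits, as documented for assign_stack_local_1 above.  */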
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && (!flag_strict_aliasing
            || (alias_set && p->alias_set == alias_set))
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {
        if (p->align == align && p->size == size)
          {
            best_p = 0;
            break;
          }
        best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
          /* We can't split slots if -fstrict-aliasing because the
             information about the alias set for the new slot will be
             lost.  */
          && !flag_strict_aliasing)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->align = best_p->align;
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
        abort ();
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
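
/* For example (illustrative only), code expanding a call whose value
   is returned in memory might allocate its target with

       rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   asking for addressable stack memory (MEMORY_REQUIRED == 1) without
   promotion; the slot is then reclaimed by the next free_temp_slots
   unless it has been preserved.  */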
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are in effect.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if both OLD and NEW are a PLUS and if
     there is a register in common between them.  If so, try a recursive
     call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS || GET_CODE (new) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
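
/* The level counters above are driven in matched pairs.  A typical
   statement expansion looks like this (illustrative only):

       push_temp_slots ();
       ... expand the statement, calling assign_stack_temp et al. ...
       preserve_temp_slots (result);   -- only if the value must survive
       free_temp_slots ();
       pop_temp_slots ();

   so temporaries allocated for the statement become reusable
   afterwards.  */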
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                            0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0,
                          0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), Pmode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : current_function;
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];
  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
                       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      temp
        = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
                        stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
                                0);
          end_sequence ();
        }
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
                        0, 0);
  end_sequence ();
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
                   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* Remember the notes in case we delete the insn.  */
          note = REG_NOTES (insn);

          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && (XEXP (PATTERN (insn), 0) == var
                  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
                      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                          || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.
             Similarly if this is storing VAR from a register from which
             it was loaded in the previous insn.  This will occur
             when an ADDRESSOF was made for an arglist slot.  */
          else if (toplevel
                   && (set = single_set (insn)) != 0
                   && SET_DEST (set) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && (rtx_equal_p (SET_SRC (set), var)
                       || (GET_CODE (SET_SRC (set)) == REG
                           && (prev = prev_nonnote_insn (insn)) != 0
                           && (prev_set = single_set (prev)) != 0
                           && SET_DEST (prev_set) == SET_SRC (set)
                           && rtx_equal_p (SET_SRC (prev_set), var))))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

              if (SMALL_REGISTER_CLASSES)
                {
                  /* If the insn that copies the results of a CALL_INSN
                     into a pseudo now references VAR, we have to use an
                     intermediate pseudo since we want the life of the
                     return value register to be only a single insn.

                     If we don't use an intermediate pseudo, such things as
                     address computations to make the address of VAR valid
                     if it is not can be placed between the CALL_INSN and INSN.

                     To make sure this doesn't happen, we record the destination
                     of the CALL_INSN and see if the next insn uses both that
                     and VAR.  */

                  if (call_dest != 0 && GET_CODE (insn) == INSN
                      && reg_mentioned_p (var, PATTERN (insn))
                      && reg_mentioned_p (call_dest, PATTERN (insn)))
                    {
                      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                      emit_insn_before (gen_move_insn (temp, call_dest), insn);

                      PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                    call_dest, temp);
                    }

                  if (GET_CODE (insn) == CALL_INSN
                      && GET_CODE (PATTERN (insn)) == SET)
                    call_dest = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (insn) == CALL_INSN
                           && GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    call_dest = 0;
                }

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          while (note)
            {
              if (GET_CODE (note) != INSN_LIST)
                XEXP (note, 0)
                  = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
              note = XEXP (note, 1);
            }
        }

      if (!ht)
        insn = next;
      else if (insn_list)
        {
          insn = XEXP (insn_list, 0);
          insn_list = XEXP (insn_list, 1);
        }
      else
        insn = NULL_RTX;
    }
}
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register const char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
        {
          /* Prevent sharing of rtl that might lose.  */
          rtx sub = copy_rtx (XEXP (var, 0));

          if (! validate_change (insn, loc, sub, 0))
            {
              rtx y = gen_reg_rtx (GET_MODE (sub));
              rtx seq, new_insn;

              /* We should be able to replace with a register or all is lost.
                 Note that we can't use validate_change to verify this, since
                 we're not caring for replacing all dups simultaneously.  */
              if (! validate_replace_rtx (*loc, y, insn))
                abort ();

              /* Careful!  First try to recognize a direct move of the
                 value, mimicking how things are done in gen_reload wrt
                 PLUS.  Consider what happens when insn is a conditional
                 move instruction and addsi3 clobbers flags.  */

              start_sequence ();
              new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
              seq = gen_sequence ();
              end_sequence ();

              if (recog_memoized (new_insn) < 0)
                {
                  /* That failed.  Fall back on force_operand and hope.  */

                  start_sequence ();
                  force_operand (sub, y);
                  seq = gen_sequence ();
                  end_sequence ();
                }

#ifdef HAVE_cc0
              /* Don't separate setter from user.  */
              if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
                insn = PREV_INSN (insn);
#endif

              emit_insn_before (seq, insn);
            }
        }
      return;

    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it.  Otherwise,
             try to fix up this address in case it is invalid.  */

          replacement = find_fixup_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          *loc = replacement->new = x = fixup_stack_1 (x, insn);

          /* Unless we are forcing memory to register or we changed the mode,
             we can leave things the way they are if the insn is valid.  */

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && GET_MODE (x) == promoted_mode
              && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (promoted_mode);
          return;
        }

      /* If X contains VAR, we need to unshare it here so that we update
         each occurrence separately.  But all identical MEMs in one insn
         must be replaced with the same rtx because of the possibility of
         MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
        {
          replacement = find_fixup_replacement (replacements, x);
          if (replacement->new == 0)
            replacement->new = copy_most_rtx (x, var);

          *loc = x = replacement->new;
        }
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
         by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && SUBREG_REG (XEXP (x, 0)) == var))
        {
          /* Get TEM as a valid MEM in the mode presently in the insn.

             We don't worry about the possibility of MATCH_DUP here; it
             is highly unlikely and would be tricky to handle.  */

          tem = XEXP (x, 0);
          if (GET_CODE (tem) == SUBREG)
            {
              if (GET_MODE_BITSIZE (GET_MODE (tem))
                  > GET_MODE_BITSIZE (GET_MODE (var)))
                {
                  replacement = find_fixup_replacement (replacements, var);
                  if (replacement->new == 0)
                    replacement->new = gen_reg_rtx (GET_MODE (var));
                  SUBREG_REG (tem) = replacement->new;
                }
              else
                tem = fixup_memory_subreg (tem, insn, 0);
            }
          else
            tem = fixup_stack_1 (tem, insn);

          /* Unless we want to load from memory, get TEM into the proper mode
             for an extract from memory.  This can only be done if the
             extract is at a constant position and length.  */

          if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_CODE (XEXP (x, 2)) == CONST_INT
              && ! mode_dependent_address_p (XEXP (tem, 0))
              && ! MEM_VOLATILE_P (tem))
            {
              enum machine_mode wanted_mode = VOIDmode;
              enum machine_mode is_mode = GET_MODE (tem);
              HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
              if (GET_CODE (x) == ZERO_EXTRACT)
                {
                  wanted_mode
                    = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
                  if (wanted_mode == VOIDmode)
                    wanted_mode = word_mode;
                }
#endif
#ifdef HAVE_extv
              if (GET_CODE (x) == SIGN_EXTRACT)
                {
                  wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
                  if (wanted_mode == VOIDmode)
                    wanted_mode = word_mode;
                }
#endif
              /* If we have a narrower mode, we can do something.  */
              if (wanted_mode != VOIDmode
                  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                {
                  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
                  rtx old_pos = XEXP (x, 2);
                  rtx newmem;

                  /* If the bytes and bits are counted differently, we
                     must adjust the offset.  */
                  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                    offset = (GET_MODE_SIZE (is_mode)
1942 - GET_MODE_SIZE (wanted_mode) - offset);
1944 pos %= GET_MODE_BITSIZE (wanted_mode);
1946 newmem = gen_rtx_MEM (wanted_mode,
1947 plus_constant (XEXP (tem, 0), offset));
1948 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1949 MEM_COPY_ATTRIBUTES (newmem, tem);
1951 /* Make the change and see if the insn remains valid. */
1952 INSN_CODE (insn) = -1;
1953 XEXP (x, 0) = newmem;
1954 XEXP (x, 2) = GEN_INT (pos);
1956 if (recog_memoized (insn) >= 0)
1957 return;
1959 /* Otherwise, restore old position. XEXP (x, 0) will be
1960 restored later. */
1961 XEXP (x, 2) = old_pos;
1965 /* If we get here, the bitfield extract insn can't accept a memory
1966 reference. Copy the input into a register. */
1968 tem1 = gen_reg_rtx (GET_MODE (tem));
1969 emit_insn_before (gen_move_insn (tem1, tem), insn);
1970 XEXP (x, 0) = tem1;
1971 return;
1973 break;
1975 case SUBREG:
1976 if (SUBREG_REG (x) == var)
1978 /* If this is a special SUBREG made because VAR was promoted
1979 to a wider mode, replace it with VAR and call ourself
1980 recursively, this time saying that the object previously
1981 had its current mode (by virtue of the SUBREG). */
1983 if (SUBREG_PROMOTED_VAR_P (x))
1985 *loc = var;
1986 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1987 return;
1990 /* If this SUBREG makes VAR wider, it has become a paradoxical
1991 SUBREG with VAR in memory, but these aren't allowed at this
1992 stage of the compilation. So load VAR into a pseudo and take
1993 a SUBREG of that pseudo. */
1994 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1996 replacement = find_fixup_replacement (replacements, var);
1997 if (replacement->new == 0)
1998 replacement->new = gen_reg_rtx (GET_MODE (var));
1999 SUBREG_REG (x) = replacement->new;
2000 return;
2003 /* See if we have already found a replacement for this SUBREG.
2004 If so, use it. Otherwise, make a MEM and see if the insn
2005 is recognized. If not, or if we should force MEM into a register,
2006 make a pseudo for this SUBREG. */
2007 replacement = find_fixup_replacement (replacements, x);
2008 if (replacement->new)
2010 *loc = replacement->new;
2011 return;
2014 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2016 INSN_CODE (insn) = -1;
2017 if (! flag_force_mem && recog_memoized (insn) >= 0)
2018 return;
2020 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2021 return;
2023 break;
2025 case SET:
2026 /* First do special simplification of bit-field references. */
2027 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2028 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2029 optimize_bit_field (x, insn, 0);
2030 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2031 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2032 optimize_bit_field (x, insn, NULL_PTR);
2034 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2035 into a register and then store it back out. */
2036 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2037 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2038 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2039 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2040 > GET_MODE_SIZE (GET_MODE (var))))
2042 replacement = find_fixup_replacement (replacements, var);
2043 if (replacement->new == 0)
2044 replacement->new = gen_reg_rtx (GET_MODE (var));
2046 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2047 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2050 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2051 insn into a pseudo and store the low part of the pseudo into VAR. */
2052 if (GET_CODE (SET_DEST (x)) == SUBREG
2053 && SUBREG_REG (SET_DEST (x)) == var
2054 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2055 > GET_MODE_SIZE (GET_MODE (var))))
2057 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2058 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2059 tem)),
2060 insn);
2061 break;
2065 rtx dest = SET_DEST (x);
2066 rtx src = SET_SRC (x);
2067 #ifdef HAVE_insv
2068 rtx outerdest = dest;
2069 #endif
2071 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2072 || GET_CODE (dest) == SIGN_EXTRACT
2073 || GET_CODE (dest) == ZERO_EXTRACT)
2074 dest = XEXP (dest, 0);
2076 if (GET_CODE (src) == SUBREG)
2077 src = XEXP (src, 0);
2079 /* If VAR does not appear at the top level of the SET,
2080 just scan the lower levels of the tree. */
2082 if (src != var && dest != var)
2083 break;
2085 /* We will need to rerecognize this insn. */
2086 INSN_CODE (insn) = -1;
2088 #ifdef HAVE_insv
2089 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2091 /* Since this case will return, ensure we fixup all the
2092 operands here. */
2093 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2094 insn, replacements);
2095 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2096 insn, replacements);
2097 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2098 insn, replacements);
2100 tem = XEXP (outerdest, 0);
2102 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2103 that may appear inside a ZERO_EXTRACT.
2104 This was legitimate when the MEM was a REG. */
2105 if (GET_CODE (tem) == SUBREG
2106 && SUBREG_REG (tem) == var)
2107 tem = fixup_memory_subreg (tem, insn, 0);
2108 else
2109 tem = fixup_stack_1 (tem, insn);
2111 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2112 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2113 && ! mode_dependent_address_p (XEXP (tem, 0))
2114 && ! MEM_VOLATILE_P (tem))
2116 enum machine_mode wanted_mode;
2117 enum machine_mode is_mode = GET_MODE (tem);
2118 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2120 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2121 if (wanted_mode == VOIDmode)
2122 wanted_mode = word_mode;
2124 /* If we have a narrower mode, we can do something. */
2125 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2127 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2128 rtx old_pos = XEXP (outerdest, 2);
2129 rtx newmem;
2131 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2132 offset = (GET_MODE_SIZE (is_mode)
2133 - GET_MODE_SIZE (wanted_mode) - offset);
2135 pos %= GET_MODE_BITSIZE (wanted_mode);
2137 newmem = gen_rtx_MEM (wanted_mode,
2138 plus_constant (XEXP (tem, 0),
2139 offset));
2140 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2141 MEM_COPY_ATTRIBUTES (newmem, tem);
2143 /* Make the change and see if the insn remains valid. */
2144 INSN_CODE (insn) = -1;
2145 XEXP (outerdest, 0) = newmem;
2146 XEXP (outerdest, 2) = GEN_INT (pos);
2148 if (recog_memoized (insn) >= 0)
2149 return;
2151 /* Otherwise, restore old position. XEXP (x, 0) will be
2152 restored later. */
2153 XEXP (outerdest, 2) = old_pos;
2157 /* If we get here, the bit-field store doesn't allow memory
2158 or isn't located at a constant position. Load the value into
2159 a register, do the store, and put it back into memory. */
2161 tem1 = gen_reg_rtx (GET_MODE (tem));
2162 emit_insn_before (gen_move_insn (tem1, tem), insn);
2163 emit_insn_after (gen_move_insn (tem, tem1), insn);
2164 XEXP (outerdest, 0) = tem1;
2165 return;
2167 #endif
2169 /* STRICT_LOW_PART is a no-op on memory references
2170 and it can cause combinations to be unrecognizable,
2171 so eliminate it. */
2173 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2174 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2176 /* A valid insn to copy VAR into or out of a register
2177 must be left alone, to avoid an infinite loop here.
2178 If the reference to VAR is by a subreg, fix that up,
2179 since SUBREG is not valid for a memref.
2180 Also fix up the address of the stack slot.
2182 Note that we must not try to recognize the insn until
2183 after we know that we have valid addresses and no
2184 (subreg (mem ...) ...) constructs, since these interfere
2185 with determining the validity of the insn. */
2187 if ((SET_SRC (x) == var
2188 || (GET_CODE (SET_SRC (x)) == SUBREG
2189 && SUBREG_REG (SET_SRC (x)) == var))
2190 && (GET_CODE (SET_DEST (x)) == REG
2191 || (GET_CODE (SET_DEST (x)) == SUBREG
2192 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2193 && GET_MODE (var) == promoted_mode
2194 && x == single_set (insn))
2196 rtx pat;
2198 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2199 if (replacement->new)
2200 SET_SRC (x) = replacement->new;
2201 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2202 SET_SRC (x) = replacement->new
2203 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2204 else
2205 SET_SRC (x) = replacement->new
2206 = fixup_stack_1 (SET_SRC (x), insn);
2208 if (recog_memoized (insn) >= 0)
2209 return;
2211 /* INSN is not valid, but we know that we want to
2212 copy SET_SRC (x) to SET_DEST (x) in some way. So
2213 we generate the move and see whether it requires more
2214 than one insn. If it does, we emit those insns and
2215 delete INSN. Otherwise, we can just replace the pattern
2216 of INSN; we have already verified above that INSN has
2217 no function other than to do X. */
2219 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2220 if (GET_CODE (pat) == SEQUENCE)
2222 emit_insn_after (pat, insn);
2223 PUT_CODE (insn, NOTE);
2224 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2225 NOTE_SOURCE_FILE (insn) = 0;
2227 else
2228 PATTERN (insn) = pat;
2230 return;
2233 if ((SET_DEST (x) == var
2234 || (GET_CODE (SET_DEST (x)) == SUBREG
2235 && SUBREG_REG (SET_DEST (x)) == var))
2236 && (GET_CODE (SET_SRC (x)) == REG
2237 || (GET_CODE (SET_SRC (x)) == SUBREG
2238 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2239 && GET_MODE (var) == promoted_mode
2240 && x == single_set (insn))
2242 rtx pat;
2244 if (GET_CODE (SET_DEST (x)) == SUBREG)
2245 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2246 else
2247 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2249 if (recog_memoized (insn) >= 0)
2250 return;
2252 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2253 if (GET_CODE (pat) == SEQUENCE)
2255 emit_insn_after (pat, insn);
2256 PUT_CODE (insn, NOTE);
2257 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2258 NOTE_SOURCE_FILE (insn) = 0;
2260 else
2261 PATTERN (insn) = pat;
2263 return;
2266 /* Otherwise, storing into VAR must be handled specially
2267 by storing into a temporary and copying that into VAR
2268 with a new insn after this one. Note that this case
2269 will be used when storing into a promoted scalar since
2270 the insn will now have different modes on the input
2271 and output and hence will be invalid (except for the case
2272 of setting it to a constant, which does not need any
2273 change if it is valid). We generate extra code in that case,
2274 but combine.c will eliminate it. */
2276 if (dest == var)
2278 rtx temp;
2279 rtx fixeddest = SET_DEST (x);
2281 /* A STRICT_LOW_PART around a MEM can be discarded. */
2282 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2283 fixeddest = XEXP (fixeddest, 0);
2284 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2285 if (GET_CODE (fixeddest) == SUBREG)
2287 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2288 promoted_mode = GET_MODE (fixeddest);
2290 else
2291 fixeddest = fixup_stack_1 (fixeddest, insn);
2293 temp = gen_reg_rtx (promoted_mode);
2295 emit_insn_after (gen_move_insn (fixeddest,
2296 gen_lowpart (GET_MODE (fixeddest),
2297 temp)),
2298 insn);
2300 SET_DEST (x) = temp;
2304 default:
2305 break;
2308 /* Nothing special about this RTX; fix its operands. */
2310 fmt = GET_RTX_FORMAT (code);
2311 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2313 if (fmt[i] == 'e')
2314 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2315 if (fmt[i] == 'E')
2317 register int j;
2318 for (j = 0; j < XVECLEN (x, i); j++)
2319 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2320 insn, replacements);
2325 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2326 return an rtx (MEM:m1 newaddr) which is equivalent.
2327 If any insns must be emitted to compute NEWADDR, put them before INSN.
2329 UNCRITICAL nonzero means accept paradoxical subregs.
2330 This is used for subregs found inside REG_NOTES. */
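/* A sketch of the transformation, assuming a little-endian target
   with UNITS_PER_WORD == 4:

	(subreg:SI (mem:DI (reg A)) 1)

   becomes

	(mem:SI (plus:SI (reg A) (const_int 4)))

   i.e. the SUBREG_WORD is folded into the address as a byte offset.  */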
2332 static rtx
2333 fixup_memory_subreg (x, insn, uncritical)
2334 rtx x;
2335 rtx insn;
2336 int uncritical;
2338 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2339 rtx addr = XEXP (SUBREG_REG (x), 0);
2340 enum machine_mode mode = GET_MODE (x);
2341 rtx result;
2343 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2344 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2345 && ! uncritical)
2346 abort ();
2348 if (BYTES_BIG_ENDIAN)
2349 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2350 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2351 addr = plus_constant (addr, offset);
2352 if (!flag_force_addr && memory_address_p (mode, addr))
2353 /* Shortcut if no insns need be emitted. */
2354 return change_address (SUBREG_REG (x), mode, addr);
2355 start_sequence ();
2356 result = change_address (SUBREG_REG (x), mode, addr);
2357 emit_insn_before (gen_sequence (), insn);
2358 end_sequence ();
2359 return result;
2362 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2363 Replace subexpressions of X in place.
2364 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2365 Otherwise return X, with its contents possibly altered.
2367 If any insns must be emitted to compute NEWADDR, put them before INSN.
2369 UNCRITICAL is as in fixup_memory_subreg. */
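/* For example (a sketch; little-endian assumed as in the previous
   comment), inside

	(plus:SI (subreg:SI (mem:DI (reg A)) 0) (reg B))

   the inner (SUBREG (MEM)) is rewritten via fixup_memory_subreg,
   giving (plus:SI (mem:SI (reg A)) (reg B)).  */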
2371 static rtx
2372 walk_fixup_memory_subreg (x, insn, uncritical)
2373 register rtx x;
2374 rtx insn;
2375 int uncritical;
2377 register enum rtx_code code;
2378 register const char *fmt;
2379 register int i;
2381 if (x == 0)
2382 return 0;
2384 code = GET_CODE (x);
2386 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2387 return fixup_memory_subreg (x, insn, uncritical);
2389 /* Nothing special about this RTX; fix its operands. */
2391 fmt = GET_RTX_FORMAT (code);
2392 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2394 if (fmt[i] == 'e')
2395 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2396 if (fmt[i] == 'E')
2398 register int j;
2399 for (j = 0; j < XVECLEN (x, i); j++)
2400 XVECEXP (x, i, j)
2401 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2404 return x;
2407 /* For each memory ref within X, if it refers to a stack slot
2408 with an out of range displacement, put the address in a temp register
2409 (emitting new insns before INSN to load these registers)
2410 and alter the memory ref to use that register.
2411 Replace each such MEM rtx with a copy, to avoid clobberage. */
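/* A sketch, assuming the displacement below exceeds what the target's
   addressing modes allow:

	(mem:SI (plus:SI (reg fp) (const_int 100000)))

   becomes (mem:SI (reg T)) for a fresh pseudo T, after emitting
   (set (reg T) (plus:SI (reg fp) (const_int 100000))) before INSN.  */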
2413 static rtx
2414 fixup_stack_1 (x, insn)
2415 rtx x;
2416 rtx insn;
2418 register int i;
2419 register RTX_CODE code = GET_CODE (x);
2420 register const char *fmt;
2422 if (code == MEM)
2424 register rtx ad = XEXP (x, 0);
2425 /* If we have address of a stack slot but it's not valid
2426 (displacement is too large), compute the sum in a register. */
2427 if (GET_CODE (ad) == PLUS
2428 && GET_CODE (XEXP (ad, 0)) == REG
2429 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2430 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2431 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2432 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2433 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2434 #endif
2435 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2436 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2437 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2438 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2440 rtx temp, seq;
2441 if (memory_address_p (GET_MODE (x), ad))
2442 return x;
2444 start_sequence ();
2445 temp = copy_to_reg (ad);
2446 seq = gen_sequence ();
2447 end_sequence ();
2448 emit_insn_before (seq, insn);
2449 return change_address (x, VOIDmode, temp);
2451 return x;
2454 fmt = GET_RTX_FORMAT (code);
2455 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2457 if (fmt[i] == 'e')
2458 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2459 if (fmt[i] == 'E')
2461 register int j;
2462 for (j = 0; j < XVECLEN (x, i); j++)
2463 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2466 return x;
2469 /* Optimization: a bit-field instruction whose field
2470 happens to be a byte or halfword in memory
2471 can be changed to a move instruction.
2473 We call here when INSN is an insn to examine or store into a bit-field.
2474 BODY is the SET-rtx to be altered.
2476 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2477 (Currently this is called only from function.c, and EQUIV_MEM
2478 is always 0.) */
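/* A sketch, assuming BYTES_BIG_ENDIAN == BITS_BIG_ENDIAN: the store

	(set (zero_extract:SI (mem:SI (reg A)) (const_int 8) (const_int 8))
	     (reg:SI B))

   addresses a byte-aligned byte-sized field, so it can become the move

	(set (mem:QI (plus:SI (reg A) (const_int 1)))
	     (subreg:QI (reg:SI B) 0))

   provided the change group validates.  */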
2480 static void
2481 optimize_bit_field (body, insn, equiv_mem)
2482 rtx body;
2483 rtx insn;
2484 rtx *equiv_mem;
2486 register rtx bitfield;
2487 int destflag;
2488 rtx seq = 0;
2489 enum machine_mode mode;
2491 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2492 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2493 bitfield = SET_DEST (body), destflag = 1;
2494 else
2495 bitfield = SET_SRC (body), destflag = 0;
2497 /* First check that the field being stored has constant size and position
2498 and is in fact a byte or halfword suitably aligned. */
2500 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2501 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2502 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2503 != BLKmode)
2504 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2506 register rtx memref = 0;
2508 /* Now check that the containing word is memory, not a register,
2509 and that it is safe to change the machine mode. */
2511 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2512 memref = XEXP (bitfield, 0);
2513 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2514 && equiv_mem != 0)
2515 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2516 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2517 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2518 memref = SUBREG_REG (XEXP (bitfield, 0));
2519 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2520 && equiv_mem != 0
2521 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2522 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2524 if (memref
2525 && ! mode_dependent_address_p (XEXP (memref, 0))
2526 && ! MEM_VOLATILE_P (memref))
2528 /* Now adjust the address, first for any subreg'ing
2529 that we are now getting rid of,
2530 and then for which byte of the word is wanted. */
2532 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2533 rtx insns;
2535 /* Adjust OFFSET to count bits from low-address byte. */
2536 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2537 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2538 - offset - INTVAL (XEXP (bitfield, 1)));
2540 /* Adjust OFFSET to count bytes from low-address byte. */
2541 offset /= BITS_PER_UNIT;
2542 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2544 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2545 if (BYTES_BIG_ENDIAN)
2546 offset -= (MIN (UNITS_PER_WORD,
2547 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2548 - MIN (UNITS_PER_WORD,
2549 GET_MODE_SIZE (GET_MODE (memref))));
2552 start_sequence ();
2553 memref = change_address (memref, mode,
2554 plus_constant (XEXP (memref, 0), offset));
2555 insns = get_insns ();
2556 end_sequence ();
2557 emit_insns_before (insns, insn);
2559 /* Store this memory reference where
2560 we found the bit field reference. */
2562 if (destflag)
2564 validate_change (insn, &SET_DEST (body), memref, 1);
2565 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2567 rtx src = SET_SRC (body);
2568 while (GET_CODE (src) == SUBREG
2569 && SUBREG_WORD (src) == 0)
2570 src = SUBREG_REG (src);
2571 if (GET_MODE (src) != GET_MODE (memref))
2572 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2573 validate_change (insn, &SET_SRC (body), src, 1);
2575 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2576 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2577 /* This shouldn't happen because anything that didn't have
2578 one of these modes should have got converted explicitly
2579 and then referenced through a subreg.
2580 This is so because the original bit-field was
2581 handled by agg_mode and so its tree structure had
2582 the same mode that memref now has. */
2583 abort ();
2585 else
2587 rtx dest = SET_DEST (body);
2589 while (GET_CODE (dest) == SUBREG
2590 && SUBREG_WORD (dest) == 0
2591 && (GET_MODE_CLASS (GET_MODE (dest))
2592 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2593 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2594 <= UNITS_PER_WORD))
2595 dest = SUBREG_REG (dest);
2597 validate_change (insn, &SET_DEST (body), dest, 1);
2599 if (GET_MODE (dest) == GET_MODE (memref))
2600 validate_change (insn, &SET_SRC (body), memref, 1);
2601 else
2603 /* Convert the mem ref to the destination mode. */
2604 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2606 start_sequence ();
2607 convert_move (newreg, memref,
2608 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2609 seq = get_insns ();
2610 end_sequence ();
2612 validate_change (insn, &SET_SRC (body), newreg, 1);
2616 /* See if we can convert this extraction or insertion into
2617 a simple move insn. We might not be able to do so if this
2618 was, for example, part of a PARALLEL.
2620 If we succeed, write out any needed conversions. If we fail,
2621 it is hard to guess why we failed, so don't do anything
2622 special; just let the optimization be suppressed. */
2624 if (apply_change_group () && seq)
2625 emit_insns_before (seq, insn);
2630 /* These routines are responsible for converting virtual register references
2631 to the actual hard register references once RTL generation is complete.
2633 The following five variables are used for communication between the
2634 routines. They contain the offsets of the virtual registers from their
2635 respective hard registers. */
2637 static int in_arg_offset;
2638 static int var_offset;
2639 static int dynamic_offset;
2640 static int out_arg_offset;
2641 static int cfa_offset;
2643 /* On most machines, the stack pointer register is equivalent to the bottom
2644 of the stack. */
2646 #ifndef STACK_POINTER_OFFSET
2647 #define STACK_POINTER_OFFSET 0
2648 #endif
2650 /* If not defined, pick an appropriate default for the offset of dynamically
2651 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2652 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2654 #ifndef STACK_DYNAMIC_OFFSET
2656 #ifdef ACCUMULATE_OUTGOING_ARGS
2657 /* The bottom of the stack points to the actual arguments. If
2658 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2659 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2660 stack space for register parameters is not pushed by the caller, but
2661 is rather part of the fixed stack areas and hence not included in
2662 `current_function_outgoing_args_size'. Nevertheless, we must allow
2663 for it when allocating stack dynamic objects. */
2665 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2666 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2667 (current_function_outgoing_args_size \
2668 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2670 #else
2671 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2672 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2673 #endif
2675 #else
2676 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2677 #endif
2678 #endif
2680 /* On a few machines, the CFA coincides with the arg pointer. */
2682 #ifndef ARG_POINTER_CFA_OFFSET
2683 #define ARG_POINTER_CFA_OFFSET 0
2684 #endif
2687 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2688 its address taken. DECL is the decl for the object stored in the
2689 register, for later use if we do need to force REG into the stack.
2690 REG is overwritten by the MEM as in put_reg_into_stack. */
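/* A sketch: for DECL `i' in (reg:SI 42), the register is rewritten in
   place into something like

	(mem:SI (addressof:SI (reg:SI 53) 42 <decl i>))

   where (reg:SI 53) is a fresh pseudo and 42 is remembered as
   ADDRESSOF_REGNO for later use by put_addressof_into_stack.  */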
2692 rtx
2693 gen_mem_addressof (reg, decl)
2694 rtx reg;
2695 tree decl;
2697 tree type = TREE_TYPE (decl);
2698 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2699 REGNO (reg), decl);
2701 /* If the original REG was a user-variable, then so is the REG whose
2702 address is being taken. Likewise for unchanging. */
2703 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2704 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2706 PUT_CODE (reg, MEM);
2707 PUT_MODE (reg, DECL_MODE (decl));
2708 XEXP (reg, 0) = r;
2709 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2710 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2711 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2713 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2714 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2716 return reg;
2719 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2721 #if 0
2722 void
2723 flush_addressof (decl)
2724 tree decl;
2726 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2727 && DECL_RTL (decl) != 0
2728 && GET_CODE (DECL_RTL (decl)) == MEM
2729 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2730 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2731 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2733 #endif
2735 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2737 static void
2738 put_addressof_into_stack (r, ht)
2739 rtx r;
2740 struct hash_table *ht;
2742 tree decl = ADDRESSOF_DECL (r);
2743 rtx reg = XEXP (r, 0);
2745 if (GET_CODE (reg) != REG)
2746 abort ();
2748 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2749 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2750 ADDRESSOF_REGNO (r),
2751 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2754 /* List of replacements made below in purge_addressof_1 when creating
2755 bitfield insertions. */
2756 static rtx purge_bitfield_addressof_replacements;
2758 /* List of replacements made below in purge_addressof_1 for patterns
2759 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2760 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
2761 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2762 enough in complex cases, e.g. when some field values can be
2763 extracted by using a MEM with a narrower mode. */
2764 static rtx purge_addressof_replacements;
2766 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2767 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2768 the stack. */
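/* A sketch of the common case: when the modes agree,

	(mem:SI (addressof:SI (reg:SI 53) 42))

   collapses back to (reg:SI 53).  Only when no direct replacement
   (or bit-field load/store) validates is the register finally
   forced into the stack via put_addressof_into_stack.  */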
2770 static void
2771 purge_addressof_1 (loc, insn, force, store, ht)
2772 rtx *loc;
2773 rtx insn;
2774 int force, store;
2775 struct hash_table *ht;
2777 rtx x;
2778 RTX_CODE code;
2779 int i, j;
2780 const char *fmt;
2782 /* Re-start here to avoid recursion in common cases. */
2783 restart:
2785 x = *loc;
2786 if (x == 0)
2787 return;
2789 code = GET_CODE (x);
2791 /* If we don't return in any of the cases below, we will recurse inside
2792 the RTX, which will normally result in any ADDRESSOF being forced into
2793 memory. */
2794 if (code == SET)
2796 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2797 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2798 return;
2801 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2803 /* We must create a copy of the rtx because it was created by
2804 overwriting a REG rtx which is always shared. */
2805 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2806 rtx insns;
2808 if (validate_change (insn, loc, sub, 0)
2809 || validate_replace_rtx (x, sub, insn))
2810 return;
2812 start_sequence ();
2813 sub = force_operand (sub, NULL_RTX);
2814 if (! validate_change (insn, loc, sub, 0)
2815 && ! validate_replace_rtx (x, sub, insn))
2816 abort ();
2818 insns = gen_sequence ();
2819 end_sequence ();
2820 emit_insn_before (insns, insn);
2821 return;
2824 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2826 rtx sub = XEXP (XEXP (x, 0), 0);
2827 rtx sub2;
2829 if (GET_CODE (sub) == MEM)
2831 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2832 MEM_COPY_ATTRIBUTES (sub2, sub);
2833 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2834 sub = sub2;
2836 else if (GET_CODE (sub) == REG
2837 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2839 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2841 int size_x, size_sub;
2843 if (!insn)
2845 /* When processing REG_NOTES, look at the list of
2846 replacements done on the insn to find the register that X
2847 was replaced by. */
2848 rtx tem;
2850 for (tem = purge_bitfield_addressof_replacements;
2851 tem != NULL_RTX;
2852 tem = XEXP (XEXP (tem, 1), 1))
2853 if (rtx_equal_p (x, XEXP (tem, 0)))
2855 *loc = XEXP (XEXP (tem, 1), 0);
2856 return;
2859 /* See comment for purge_addressof_replacements. */
2860 for (tem = purge_addressof_replacements;
2861 tem != NULL_RTX;
2862 tem = XEXP (XEXP (tem, 1), 1))
2863 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2865 rtx z = XEXP (XEXP (tem, 1), 0);
2867 if (GET_MODE (x) == GET_MODE (z)
2868 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2869 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2870 abort ();
2872 /* It can happen that the note may speak of things
2873 in a wider (or just different) mode than the
2874 code did. This is especially true of
2875 REG_RETVAL. */
2877 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2878 z = SUBREG_REG (z);
2880 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2881 && (GET_MODE_SIZE (GET_MODE (x))
2882 > GET_MODE_SIZE (GET_MODE (z))))
2884 /* This can occur as a result of invalid
2885 pointer casts, e.g. float f; ...
2886 *(long long int *)&f.
2887 ??? We could emit a warning here, but
2888 without a line number that wouldn't be
2889 very helpful. */
2890 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2892 else
2893 z = gen_lowpart (GET_MODE (x), z);
2895 *loc = z;
2896 return;
2899 /* There should always be such a replacement. */
2900 abort ();
2903 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2904 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2906 /* Don't even consider working with paradoxical subregs,
2907 or the moral equivalent seen here. */
2908 if (size_x <= size_sub
2909 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2911 /* Do a bitfield insertion to mirror what would happen
2912 in memory. */
2914 rtx val, seq;
2916 if (store)
2918 rtx p = PREV_INSN (insn);
2920 start_sequence ();
2921 val = gen_reg_rtx (GET_MODE (x));
2922 if (! validate_change (insn, loc, val, 0))
2924 /* Discard the current sequence and put the
2925 ADDRESSOF on stack. */
2926 end_sequence ();
2927 goto give_up;
2929 seq = gen_sequence ();
2930 end_sequence ();
2931 emit_insn_before (seq, insn);
2932 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2933 insn, ht);
2935 start_sequence ();
2936 store_bit_field (sub, size_x, 0, GET_MODE (x),
2937 val, GET_MODE_SIZE (GET_MODE (sub)),
2938 GET_MODE_SIZE (GET_MODE (sub)));
2940 /* Make sure to unshare any shared rtl that store_bit_field
2941 might have created. */
2942 for (p = get_insns(); p; p = NEXT_INSN (p))
2944 reset_used_flags (PATTERN (p));
2945 reset_used_flags (REG_NOTES (p));
2946 reset_used_flags (LOG_LINKS (p));
2948 unshare_all_rtl (get_insns ());
2950 seq = gen_sequence ();
2951 end_sequence ();
2952 p = emit_insn_after (seq, insn);
2953 if (NEXT_INSN (insn))
2954 compute_insns_for_mem (NEXT_INSN (insn),
2955 p ? NEXT_INSN (p) : NULL_RTX,
2956 ht);
2958 else
2960 rtx p = PREV_INSN (insn);
2962 start_sequence ();
2963 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2964 GET_MODE (x), GET_MODE (x),
2965 GET_MODE_SIZE (GET_MODE (sub)),
2966 GET_MODE_SIZE (GET_MODE (sub)));
2968 if (! validate_change (insn, loc, val, 0))
2970 /* Discard the current sequence and put the
2971 ADDRESSOF on stack. */
2972 end_sequence ();
2973 goto give_up;
2976 seq = gen_sequence ();
2977 end_sequence ();
2978 emit_insn_before (seq, insn);
2979 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2980 insn, ht);
2983 /* Remember the replacement so that the same one can be done
2984 on the REG_NOTES. */
2985 purge_bitfield_addressof_replacements
2986 = gen_rtx_EXPR_LIST (VOIDmode, x,
2987 gen_rtx_EXPR_LIST
2988 (VOIDmode, val,
2989 purge_bitfield_addressof_replacements));
2991 /* We replaced with a reg -- all done. */
2992 return;
2996 else if (validate_change (insn, loc, sub, 0))
2998 /* Remember the replacement so that the same one can be done
2999 on the REG_NOTES. */
3000 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3002 rtx tem;
3004 for (tem = purge_addressof_replacements;
3005 tem != NULL_RTX;
3006 tem = XEXP (XEXP (tem, 1), 1))
3007 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3009 XEXP (XEXP (tem, 1), 0) = sub;
3010 return;
3012 purge_addressof_replacements
3013 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3014 gen_rtx_EXPR_LIST (VOIDmode, sub,
3015 purge_addressof_replacements));
3016 return;
3018 goto restart;
3020 give_up:;
3021 /* else give up and put it into the stack */
3024 else if (code == ADDRESSOF)
3026 put_addressof_into_stack (x, ht);
3027 return;
3029 else if (code == SET)
3031 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3032 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3033 return;
3036 /* Scan all subexpressions. */
3037 fmt = GET_RTX_FORMAT (code);
3038 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3040 if (*fmt == 'e')
3041 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3042 else if (*fmt == 'E')
3043 for (j = 0; j < XVECLEN (x, i); j++)
3044 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3048 /* Return a new hash table entry in HT. */
3050 static struct hash_entry *
3051 insns_for_mem_newfunc (he, ht, k)
3052 struct hash_entry *he;
3053 struct hash_table *ht;
3054 hash_table_key k ATTRIBUTE_UNUSED;
3056 struct insns_for_mem_entry *ifmhe;
3057 if (he)
3058 return he;
3060 ifmhe = ((struct insns_for_mem_entry *)
3061 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3062 ifmhe->insns = NULL_RTX;
3064 return &ifmhe->he;
3067 /* Return a hash value for K, a REG. */
3069 static unsigned long
3070 insns_for_mem_hash (k)
3071 hash_table_key k;
3073 /* K is really an RTX. Just use the address as the hash value. */
3074 return (unsigned long) k;
3077 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3079 static boolean
3080 insns_for_mem_comp (k1, k2)
3081 hash_table_key k1;
3082 hash_table_key k2;
3084 return k1 == k2;
3087 struct insns_for_mem_walk_info {
3088 /* The hash table that we are using to record which INSNs use which
3089 MEMs. */
3090 struct hash_table *ht;
3092 /* The INSN we are currently processing. */
3093 rtx insn;
3095 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3096 to find the insns that use the REGs in the ADDRESSOFs. */
3097 int pass;
3100 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3101 that might be used in an ADDRESSOF expression, record this INSN in
3102 the hash table given by DATA (which is really a pointer to an
3103 insns_for_mem_walk_info structure). */
3105 static int
3106 insns_for_mem_walk (r, data)
3107 rtx *r;
3108 void *data;
3110 struct insns_for_mem_walk_info *ifmwi
3111 = (struct insns_for_mem_walk_info *) data;
3113 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3114 && GET_CODE (XEXP (*r, 0)) == REG)
3115 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3116 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3118 /* Look up this REG in the hash table; an entry exists only if it appeared inside an ADDRESSOF on the first pass. */
3119 struct insns_for_mem_entry *ifme
3120 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3121 *r,
3122 /*create=*/0,
3123 /*copy=*/0);
3125 /* If we have not already recorded this INSN, do so now. Since
3126 we process the INSNs in order, we know that if we have
3127 recorded it, it must be at the front of the list. */
3128 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3130 /* We do the allocation on the same obstack as is used for
3131 the hash table since this memory will not be used once
3132 the hash table is deallocated. */
3133 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3134 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3135 ifme->insns);
3136 pop_obstacks ();
3140 return 0;
3143 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3144 which REGs in HT. */
3146 static void
3147 compute_insns_for_mem (insns, last_insn, ht)
3148 rtx insns;
3149 rtx last_insn;
3150 struct hash_table *ht;
3152 rtx insn;
3153 struct insns_for_mem_walk_info ifmwi;
3154 ifmwi.ht = ht;
3156 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3157 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3158 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3160 ifmwi.insn = insn;
3161 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3165 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3166 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3167 stack. */
3169 void
3170 purge_addressof (insns)
3171 rtx insns;
3173 rtx insn;
3174 struct hash_table ht;
3176 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3177 requires a fixup pass over the instruction stream to correct
3178 INSNs that depended on the REG being a REG, and not a MEM. But,
3179 these fixup passes are slow. Furthermore, most MEMs are not
3180 mentioned in very many instructions. So, we speed up the process
3181 by pre-calculating which REGs occur in which INSNs; that allows
3182 us to perform the fixup passes much more quickly. */
3183 hash_table_init (&ht,
3184 insns_for_mem_newfunc,
3185 insns_for_mem_hash,
3186 insns_for_mem_comp);
3187 compute_insns_for_mem (insns, NULL_RTX, &ht);
3189 for (insn = insns; insn; insn = NEXT_INSN (insn))
3190 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3191 || GET_CODE (insn) == CALL_INSN)
3193 purge_addressof_1 (&PATTERN (insn), insn,
3194 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3195 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht);
3198 /* Clean up. */
3199 hash_table_free (&ht);
3200 purge_bitfield_addressof_replacements = 0;
3201 purge_addressof_replacements = 0;
3204 /* Pass through the INSNS of function FNDECL and convert virtual register
3205 references to hard register references. */
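/* A sketch: a frame reference such as

	(mem:SI (plus:SI (reg virtual-stack-vars) (const_int 8)))

   is rewritten into

	(mem:SI (plus:SI (reg fp) (const_int STARTING_FRAME_OFFSET + 8)))

   and likewise for the other virtual registers and their offsets.  */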
3207 void
3208 instantiate_virtual_regs (fndecl, insns)
3209 tree fndecl;
3210 rtx insns;
3212 rtx insn;
3213 int i;
3215 /* Compute the offsets to use for this function. */
3216 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3217 var_offset = STARTING_FRAME_OFFSET;
3218 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3219 out_arg_offset = STACK_POINTER_OFFSET;
3220 cfa_offset = ARG_POINTER_CFA_OFFSET;
3222 /* Scan all variables and parameters of this function. For each that is
3223 in memory, instantiate all virtual registers if the result is a valid
3224 address. If not, we do it later. That will handle most uses of virtual
3225 regs on many machines. */
3226 instantiate_decls (fndecl, 1);
3228 /* Initialize recognition, indicating that volatile is OK. */
3229 init_recog ();
3231 /* Scan through all the insns, instantiating every virtual register still
3232 present. */
3233 for (insn = insns; insn; insn = NEXT_INSN (insn))
3234 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3235 || GET_CODE (insn) == CALL_INSN)
3237 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3238 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3241 /* Instantiate the stack slots for the parm registers, for later use in
3242 addressof elimination. */
3243 for (i = 0; i < max_parm_reg; ++i)
3244 if (parm_reg_stack_loc[i])
3245 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3247 /* Now instantiate the remaining register equivalences for debugging info.
3248 These will not be valid addresses. */
3249 instantiate_decls (fndecl, 0);
3251 /* Indicate that, from now on, assign_stack_local should use
3252 frame_pointer_rtx. */
3253 virtuals_instantiated = 1;
3256 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3257 all virtual registers in their DECL_RTL's.
3259 If VALID_ONLY, do this only if the resulting address is still valid.
3260 Otherwise, always do it. */
3262 static void
3263 instantiate_decls (fndecl, valid_only)
3264 tree fndecl;
3265 int valid_only;
3267 tree decl;
3269 if (DECL_SAVED_INSNS (fndecl))
3270 /* When compiling an inline function, the obstack used for
3271 rtl allocation is the maybepermanent_obstack. Calling
3272 `resume_temporary_allocation' switches us back to that
3273 obstack while we process this function's parameters. */
3274 resume_temporary_allocation ();
3276 /* Process all parameters of the function. */
3277 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3279 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3281 instantiate_decl (DECL_RTL (decl), size, valid_only);
3283 /* If the parameter was promoted, then the incoming RTL mode may be
3284 larger than the declared type size. We must use the larger of
3285 the two sizes. */
3286 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3287 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3290 /* Now process all variables defined in the function or its subblocks. */
3291 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3293 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3295 /* Save all rtl allocated for this function by raising the
3296 high-water mark on the maybepermanent_obstack. */
3297 preserve_data ();
3298 /* All further rtl allocation is now done in the current_obstack. */
3299 rtl_in_current_obstack ();
3303 /* Subroutine of instantiate_decls: Process all decls in the given
3304 BLOCK node and all its subblocks. */
3306 static void
3307 instantiate_decls_1 (let, valid_only)
3308 tree let;
3309 int valid_only;
3311 tree t;
3313 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3314 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3315 valid_only);
3317 /* Process all subblocks. */
3318 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3319 instantiate_decls_1 (t, valid_only);
3322 /* Subroutine of the preceding procedures: Given RTL representing a
3323 decl and the size of the object, do any instantiation required.
3325 If VALID_ONLY is non-zero, it means that the RTL should only be
3326 changed if the new address is valid. */
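/* A sketch: given a DECL_RTL of

	(mem:SI (plus:SI (reg virtual-stack-vars) (const_int 4)))

   the address becomes (plus:SI (reg fp) (const_int var_offset + 4)).
   With VALID_ONLY nonzero, the new address is stored back only if
   memory_address_p accepts it for every mode up to SIZE bytes.  */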
3328 static void
3329 instantiate_decl (x, size, valid_only)
3330 rtx x;
3331 int size;
3332 int valid_only;
3334 enum machine_mode mode;
3335 rtx addr;
3337 /* If this is not a MEM, no need to do anything. Similarly if the
3338 address is a constant or a register that is not a virtual register. */
3340 if (x == 0 || GET_CODE (x) != MEM)
3341 return;
3343 addr = XEXP (x, 0);
3344 if (CONSTANT_P (addr)
3345 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3346 || (GET_CODE (addr) == REG
3347 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3348 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3349 return;
3351 /* If we should only do this if the address is valid, copy the address.
3352 We need to do this so we can undo any changes that might make the
3353 address invalid. This copy is unfortunate, but probably can't be
3354 avoided. */
3356 if (valid_only)
3357 addr = copy_rtx (addr);
3359 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3361 if (valid_only)
3363 /* Now verify that the resulting address is valid for every integer or
3364 floating-point mode up to and including SIZE bytes long. We do this
3365 since the object might be accessed in any mode and frame addresses
3366 are shared. */
3368 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3369 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3370 mode = GET_MODE_WIDER_MODE (mode))
3371 if (! memory_address_p (mode, addr))
3372 return;
3374 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3375 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3376 mode = GET_MODE_WIDER_MODE (mode))
3377 if (! memory_address_p (mode, addr))
3378 return;
3381 /* Put back the address now that we have updated it and we either know
3382 it is valid or we don't care whether it is valid. */
3384 XEXP (x, 0) = addr;
3387 /* Given a pointer to a piece of rtx and an optional pointer to the
3388 containing object, instantiate any virtual registers present in it.
3390 If EXTRA_INSNS, we always do the replacement and generate
3391 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
3392 is not valid.
3394 Return 1 if we either had nothing to do or if we were able to do the
3395 needed replacement. Return 0 otherwise; we only return zero if
3396 EXTRA_INSNS is zero.
3398 We first try some simple transformations to avoid the creation of extra
3399 pseudos. */
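/* A sketch of the simplest PLUS case below:

	(plus:SI (reg virtual-incoming-args) (const_int 4))

   first becomes (plus:SI (reg ap) (const_int in_arg_offset + 4)).
   If that fails to validate and EXTRA_INSNS is nonzero, the sum is
   computed into a fresh pseudo before OBJECT and that pseudo is
   substituted instead.  */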
3401 static int
3402 instantiate_virtual_regs_1 (loc, object, extra_insns)
3403 rtx *loc;
3404 rtx object;
3405 int extra_insns;
3407 rtx x;
3408 RTX_CODE code;
3409 rtx new = 0;
3410 HOST_WIDE_INT offset = 0;
3411 rtx temp;
3412 rtx seq;
3413 int i, j;
3414 const char *fmt;
3416 /* Re-start here to avoid recursion in common cases. */
3417 restart:
3419 x = *loc;
3420 if (x == 0)
3421 return 1;
3423 code = GET_CODE (x);
3425 /* Check for some special cases. */
3426 switch (code)
3428 case CONST_INT:
3429 case CONST_DOUBLE:
3430 case CONST:
3431 case SYMBOL_REF:
3432 case CODE_LABEL:
3433 case PC:
3434 case CC0:
3435 case ASM_INPUT:
3436 case ADDR_VEC:
3437 case ADDR_DIFF_VEC:
3438 case RETURN:
3439 return 1;
3441 case SET:
3442 /* We are allowed to set the virtual registers. This means that
3443 the actual register should receive the source minus the
3444 appropriate offset. This is used, for example, in the handling
3445 of non-local gotos. */
3446 if (SET_DEST (x) == virtual_incoming_args_rtx)
3447 new = arg_pointer_rtx, offset = - in_arg_offset;
3448 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3449 new = frame_pointer_rtx, offset = - var_offset;
3450 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3451 new = stack_pointer_rtx, offset = - dynamic_offset;
3452 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3453 new = stack_pointer_rtx, offset = - out_arg_offset;
3454 else if (SET_DEST (x) == virtual_cfa_rtx)
3455 new = arg_pointer_rtx, offset = - cfa_offset;
3457 if (new)
3459 rtx src = SET_SRC (x);
3461 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3463 /* The only valid sources here are PLUS or REG. Just do
3464 the simplest possible thing to handle them. */
3465 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3466 abort ();
3468 start_sequence ();
3469 if (GET_CODE (src) != REG)
3470 temp = force_operand (src, NULL_RTX);
3471 else
3472 temp = src;
3473 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3474 seq = get_insns ();
3475 end_sequence ();
3477 emit_insns_before (seq, object);
3478 SET_DEST (x) = new;
3480 if (! validate_change (object, &SET_SRC (x), temp, 0)
3481 || ! extra_insns)
3482 abort ();
3484 return 1;
3487 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3488 loc = &SET_SRC (x);
3489 goto restart;
3491 case PLUS:
3492 /* Handle special case of virtual register plus constant. */
3493 if (CONSTANT_P (XEXP (x, 1)))
3495 rtx old, new_offset;
3497 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3498 if (GET_CODE (XEXP (x, 0)) == PLUS)
3500 rtx inner = XEXP (XEXP (x, 0), 0);
3502 if (inner == virtual_incoming_args_rtx)
3503 new = arg_pointer_rtx, offset = in_arg_offset;
3504 else if (inner == virtual_stack_vars_rtx)
3505 new = frame_pointer_rtx, offset = var_offset;
3506 else if (inner == virtual_stack_dynamic_rtx)
3507 new = stack_pointer_rtx, offset = dynamic_offset;
3508 else if (inner == virtual_outgoing_args_rtx)
3509 new = stack_pointer_rtx, offset = out_arg_offset;
3510 else if (inner == virtual_cfa_rtx)
3511 new = arg_pointer_rtx, offset = cfa_offset;
3512 else
3514 loc = &XEXP (x, 0);
3515 goto restart;
3518 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3519 extra_insns);
3520 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3523 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3524 new = arg_pointer_rtx, offset = in_arg_offset;
3525 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3526 new = frame_pointer_rtx, offset = var_offset;
3527 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3528 new = stack_pointer_rtx, offset = dynamic_offset;
3529 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3530 new = stack_pointer_rtx, offset = out_arg_offset;
3531 else if (XEXP (x, 0) == virtual_cfa_rtx)
3532 new = arg_pointer_rtx, offset = cfa_offset;
3533 else
3535 /* We know the second operand is a constant. Unless the
3536 first operand is a REG (which has already been checked),
3537 it needs to be checked. */
3538 if (GET_CODE (XEXP (x, 0)) != REG)
3540 loc = &XEXP (x, 0);
3541 goto restart;
3543 return 1;
3546 new_offset = plus_constant (XEXP (x, 1), offset);
3548 /* If the new constant is zero, try to replace the sum with just
3549 the register. */
3550 if (new_offset == const0_rtx
3551 && validate_change (object, loc, new, 0))
3552 return 1;
3554 /* Next try to replace the register and new offset.
3555 There are two changes to validate here, and we can't assume that
3556 when the old offset equals the new one, just changing the register
3557 will yield a valid insn. In the interests of a little efficiency,
3558 however, we only call validate change once (we don't queue up the
3559 changes and then call apply_change_group). */
3561 old = XEXP (x, 0);
3562 if (offset == 0
3563 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3564 : (XEXP (x, 0) = new,
3565 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3567 if (! extra_insns)
3569 XEXP (x, 0) = old;
3570 return 0;
3573 /* Otherwise copy the new constant into a register and replace
3574 the constant with that register. */
3575 temp = gen_reg_rtx (Pmode);
3576 XEXP (x, 0) = new;
3577 if (validate_change (object, &XEXP (x, 1), temp, 0))
3578 emit_insn_before (gen_move_insn (temp, new_offset), object);
3579 else
3581 /* If that didn't work, replace this expression with a
3582 register containing the sum. */
3584 XEXP (x, 0) = old;
3585 new = gen_rtx_PLUS (Pmode, new, new_offset);
3587 start_sequence ();
3588 temp = force_operand (new, NULL_RTX);
3589 seq = get_insns ();
3590 end_sequence ();
3592 emit_insns_before (seq, object);
3593 if (! validate_change (object, loc, temp, 0)
3594 && ! validate_replace_rtx (x, temp, object))
3595 abort ();
3599 return 1;
3602 /* Fall through to generic two-operand expression case. */
3603 case EXPR_LIST:
3604 case CALL:
3605 case COMPARE:
3606 case MINUS:
3607 case MULT:
3608 case DIV: case UDIV:
3609 case MOD: case UMOD:
3610 case AND: case IOR: case XOR:
3611 case ROTATERT: case ROTATE:
3612 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3613 case NE: case EQ:
3614 case GE: case GT: case GEU: case GTU:
3615 case LE: case LT: case LEU: case LTU:
3616 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3617 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3618 loc = &XEXP (x, 0);
3619 goto restart;
3621 case MEM:
3622 /* Most cases of MEM that convert to valid addresses have already been
3623 handled by our scan of decls. The only special handling we
3624 need here is to make a copy of the rtx to ensure it isn't being
3625 shared if we have to change it to a pseudo.
3627 If the rtx is a simple reference to an address via a virtual register,
3628 it can potentially be shared. In such cases, first try to make it
3629 a valid address, which can also be shared. Otherwise, copy it and
3630 proceed normally.
3632 First check for common cases that need no processing. These are
3633 usually due to instantiation already being done on a previous instance
3634 of a shared rtx. */
3636 temp = XEXP (x, 0);
3637 if (CONSTANT_ADDRESS_P (temp)
3638 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3639 || temp == arg_pointer_rtx
3640 #endif
3641 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3642 || temp == hard_frame_pointer_rtx
3643 #endif
3644 || temp == frame_pointer_rtx)
3645 return 1;
3647 if (GET_CODE (temp) == PLUS
3648 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3649 && (XEXP (temp, 0) == frame_pointer_rtx
3650 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3651 || XEXP (temp, 0) == hard_frame_pointer_rtx
3652 #endif
3653 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3654 || XEXP (temp, 0) == arg_pointer_rtx
3655 #endif
3657 return 1;
3659 if (temp == virtual_stack_vars_rtx
3660 || temp == virtual_incoming_args_rtx
3661 || (GET_CODE (temp) == PLUS
3662 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3663 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3664 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3666 /* This MEM may be shared. If the substitution can be done without
3667 the need to generate new pseudos, we want to do it in place
3668 so all copies of the shared rtx benefit. The call below will
3669 only make substitutions if the resulting address is still
3670 valid.
3672 Note that we cannot pass X as the object in the recursive call
3673 since the insn being processed may not allow all valid
3674 addresses. However, if we were not passed an object, we can
3675 only modify X without copying it if X will have a valid
3676 address.
3678 ??? Also note that this can still lose if OBJECT is an insn that
3679 has fewer restrictions on an address than some other insn.
3680 In that case, we will modify the shared address. This case
3681 doesn't seem very likely, though. One case where this could
3682 happen is in the case of a USE or CLOBBER reference, but we
3683 take care of that below. */
3685 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3686 object ? object : x, 0))
3687 return 1;
3689 /* Otherwise make a copy and process that copy. We copy the entire
3690 RTL expression since it might be a PLUS which could also be
3691 shared. */
3692 *loc = x = copy_rtx (x);
3695 /* Fall through to generic unary operation case. */
3696 case SUBREG:
3697 case STRICT_LOW_PART:
3698 case NEG: case NOT:
3699 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3700 case SIGN_EXTEND: case ZERO_EXTEND:
3701 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3702 case FLOAT: case FIX:
3703 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3704 case ABS:
3705 case SQRT:
3706 case FFS:
3707 /* These cases either have just one operand or we know that we need not
3708 check the rest of the operands. */
3709 loc = &XEXP (x, 0);
3710 goto restart;
3712 case USE:
3713 case CLOBBER:
3714 /* If the operand is a MEM, see if the change yields a valid MEM. If not,
3715 go ahead and make the invalid change, but apply it to a copy. For a REG,
3716 just make the recursive call, since there's no chance of a problem. */
3718 if ((GET_CODE (XEXP (x, 0)) == MEM
3719 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3720 0))
3721 || (GET_CODE (XEXP (x, 0)) == REG
3722 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3723 return 1;
3725 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3726 loc = &XEXP (x, 0);
3727 goto restart;
3729 case REG:
3730 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3731 in front of this insn and substitute the temporary. */
3732 if (x == virtual_incoming_args_rtx)
3733 new = arg_pointer_rtx, offset = in_arg_offset;
3734 else if (x == virtual_stack_vars_rtx)
3735 new = frame_pointer_rtx, offset = var_offset;
3736 else if (x == virtual_stack_dynamic_rtx)
3737 new = stack_pointer_rtx, offset = dynamic_offset;
3738 else if (x == virtual_outgoing_args_rtx)
3739 new = stack_pointer_rtx, offset = out_arg_offset;
3740 else if (x == virtual_cfa_rtx)
3741 new = arg_pointer_rtx, offset = cfa_offset;
3743 if (new)
3745 temp = plus_constant (new, offset);
3746 if (!validate_change (object, loc, temp, 0))
3748 if (! extra_insns)
3749 return 0;
3751 start_sequence ();
3752 temp = force_operand (temp, NULL_RTX);
3753 seq = get_insns ();
3754 end_sequence ();
3756 emit_insns_before (seq, object);
3757 if (! validate_change (object, loc, temp, 0)
3758 && ! validate_replace_rtx (x, temp, object))
3759 abort ();
3763 return 1;
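/* Illustrative sketch (not from the original source; the offset is
   hypothetical): if VAR_OFFSET is -16, the code above replaces

       (reg virtual-stack-vars)

   with

       (plus (reg frame-pointer) (const_int -16))

   via plus_constant, falling back to computing the sum into a
   temporary register before the insn when the direct replacement
   does not yield a recognizable insn. */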
3765 case ADDRESSOF:
3766 if (GET_CODE (XEXP (x, 0)) == REG)
3767 return 1;
3769 else if (GET_CODE (XEXP (x, 0)) == MEM)
3771 /* If we have a (addressof (mem ..)), do any instantiation inside
3772 since we know we'll be making the inside valid when we finally
3773 remove the ADDRESSOF. */
3774 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3775 return 1;
3777 break;
3779 default:
3780 break;
3783 /* Scan all subexpressions. */
3784 fmt = GET_RTX_FORMAT (code);
3785 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3786 if (*fmt == 'e')
3788 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3789 return 0;
3791 else if (*fmt == 'E')
3792 for (j = 0; j < XVECLEN (x, i); j++)
3793 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3794 extra_insns))
3795 return 0;
3797 return 1;
3800 /* Optimization: assuming this function does not receive nonlocal gotos,
3801 delete the handlers for such, as well as the insns to establish
3802 and disestablish them. */
3804 static void
3805 delete_handlers ()
3807 rtx insn;
3808 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3810 /* Delete the handler by turning off the flag that would
3811 prevent jump_optimize from deleting it.
3812 Also permit deletion of the nonlocal labels themselves
3813 if nothing local refers to them. */
3814 if (GET_CODE (insn) == CODE_LABEL)
3816 tree t, last_t;
3818 LABEL_PRESERVE_P (insn) = 0;
3820 /* Remove it from the nonlocal_label list, to avoid confusing
3821 flow. */
3822 for (t = nonlocal_labels, last_t = 0; t;
3823 last_t = t, t = TREE_CHAIN (t))
3824 if (DECL_RTL (TREE_VALUE (t)) == insn)
3825 break;
3826 if (t)
3828 if (! last_t)
3829 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3830 else
3831 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3834 if (GET_CODE (insn) == INSN)
3836 int can_delete = 0;
3837 rtx t;
3838 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3839 if (reg_mentioned_p (t, PATTERN (insn)))
3841 can_delete = 1;
3842 break;
3844 if (can_delete
3845 || (nonlocal_goto_stack_level != 0
3846 && reg_mentioned_p (nonlocal_goto_stack_level,
3847 PATTERN (insn))))
3848 delete_insn (insn);
3853 /* Output a USE for any register use in RTL.
3854 This is used with -noreg to mark the extent of lifespan
3855 of any registers used in a user-visible variable's DECL_RTL. */
3857 void
3858 use_variable (rtl)
3859 rtx rtl;
3861 if (GET_CODE (rtl) == REG)
3862 /* This is a register variable. */
3863 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3864 else if (GET_CODE (rtl) == MEM
3865 && GET_CODE (XEXP (rtl, 0)) == REG
3866 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3867 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3868 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3869 /* This is a variable-sized structure. */
3870 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3873 /* Like use_variable except that it outputs the USEs after INSN
3874 instead of at the end of the insn-chain. */
3876 void
3877 use_variable_after (rtl, insn)
3878 rtx rtl, insn;
3880 if (GET_CODE (rtl) == REG)
3881 /* This is a register variable. */
3882 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3883 else if (GET_CODE (rtl) == MEM
3884 && GET_CODE (XEXP (rtl, 0)) == REG
3885 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3886 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3887 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3888 /* This is a variable-sized structure. */
3889 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3892 int
3893 max_parm_reg_num ()
3895 return max_parm_reg;
3898 /* Return the first insn following those generated by `assign_parms'. */
3900 rtx
3901 get_first_nonparm_insn ()
3903 if (last_parm_insn)
3904 return NEXT_INSN (last_parm_insn);
3905 return get_insns ();
3908 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3909 Crash if there is none. */
3911 rtx
3912 get_first_block_beg ()
3914 register rtx searcher;
3915 register rtx insn = get_first_nonparm_insn ();
3917 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3918 if (GET_CODE (searcher) == NOTE
3919 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3920 return searcher;
3922 abort (); /* Invalid call to this function. (See comments above.) */
3923 return NULL_RTX;
3926 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3927 This means a type for which function calls must pass an address to the
3928 function or get an address back from the function.
3929 EXP may be a type node or an expression (whose type is tested). */
3931 int
3932 aggregate_value_p (exp)
3933 tree exp;
3935 int i, regno, nregs;
3936 rtx reg;
3937 tree type;
3938 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3939 type = exp;
3940 else
3941 type = TREE_TYPE (exp);
3943 if (RETURN_IN_MEMORY (type))
3944 return 1;
3945 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3946 and thus can't be returned in registers. */
3947 if (TREE_ADDRESSABLE (type))
3948 return 1;
3949 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3950 return 1;
3951 /* Make sure we have suitable call-clobbered regs to return
3952 the value in; if not, we must return it in memory. */
3953 reg = hard_function_value (type, 0);
3955 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3956 it is OK. */
3957 if (GET_CODE (reg) != REG)
3958 return 0;
3960 regno = REGNO (reg);
3961 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3962 for (i = 0; i < nregs; i++)
3963 if (! call_used_regs[regno + i])
3964 return 1;
3965 return 0;
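/* A minimal usage sketch (illustrative only; `needs_return_slot' is a
   hypothetical helper, not part of this file): a caller deciding
   whether a function needs a hidden return slot can simply ask
   aggregate_value_p about the function's DECL_RESULT, exactly as the
   code in assign_parms below does. */
#if 0
static int
needs_return_slot (fndecl)
     tree fndecl;
{
  /* DECL_RESULT carries the return value; aggregate_value_p accepts
     either a type node or an expression. */
  return aggregate_value_p (DECL_RESULT (fndecl));
}
#endif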
3968 /* Assign RTL expressions to the function's parameters.
3969 This may involve copying them into registers and using
3970 those registers as the RTL for them. */
3972 void
3973 assign_parms (fndecl)
3974 tree fndecl;
3976 register tree parm;
3977 register rtx entry_parm = 0;
3978 register rtx stack_parm = 0;
3979 CUMULATIVE_ARGS args_so_far;
3980 enum machine_mode promoted_mode, passed_mode;
3981 enum machine_mode nominal_mode, promoted_nominal_mode;
3982 int unsignedp;
3983 /* Total space needed so far for args on the stack,
3984 given as a constant and a tree-expression. */
3985 struct args_size stack_args_size;
3986 tree fntype = TREE_TYPE (fndecl);
3987 tree fnargs = DECL_ARGUMENTS (fndecl);
3988 /* This is used for the arg pointer when referring to stack args. */
3989 rtx internal_arg_pointer;
3990 /* This is a dummy PARM_DECL that we used for the function result if
3991 the function returns a structure. */
3992 tree function_result_decl = 0;
3993 #ifdef SETUP_INCOMING_VARARGS
3994 int varargs_setup = 0;
3995 #endif
3996 rtx conversion_insns = 0;
3997 struct args_size alignment_pad;
3999 /* Nonzero if the last arg is named `__builtin_va_alist',
4000 which is used on some machines for old-fashioned non-ANSI varargs.h;
4001 this should be stuck onto the stack as if it had arrived there. */
4002 int hide_last_arg
4003 = (current_function_varargs
4004 && fnargs
4005 && (parm = tree_last (fnargs)) != 0
4006 && DECL_NAME (parm)
4007 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4008 "__builtin_va_alist")));
4010 /* Nonzero if function takes extra anonymous args.
4011 This means the last named arg must be on the stack
4012 right before the anonymous ones. */
4013 int stdarg
4014 = (TYPE_ARG_TYPES (fntype) != 0
4015 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4016 != void_type_node));
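/* Illustrative examples (hypothetical declarations, not from the
   original source):

       int f (int a, ...);  TYPE_ARG_TYPES ends without void_type_node,
                            so STDARG is 1.
       int g (int a);       the list is terminated by void_type_node,
                            so STDARG is 0.
       int h ();            TYPE_ARG_TYPES is 0 (unprototyped), so
                            STDARG is 0. */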
4018 current_function_stdarg = stdarg;
4020 /* If the reg that the virtual arg pointer will be translated into is
4021 not a fixed reg or is the stack pointer, make a copy of the virtual
4022 arg pointer, and address parms via the copy. The frame pointer is
4023 considered fixed even though it is not marked as such.
4025 The second time through, simply use ap to avoid generating rtx. */
4027 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4028 || ! (fixed_regs[ARG_POINTER_REGNUM]
4029 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4030 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4031 else
4032 internal_arg_pointer = virtual_incoming_args_rtx;
4033 current_function_internal_arg_pointer = internal_arg_pointer;
4035 stack_args_size.constant = 0;
4036 stack_args_size.var = 0;
4038 /* If struct value address is treated as the first argument, make it so. */
4039 if (aggregate_value_p (DECL_RESULT (fndecl))
4040 && ! current_function_returns_pcc_struct
4041 && struct_value_incoming_rtx == 0)
4043 tree type = build_pointer_type (TREE_TYPE (fntype));
4045 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4047 DECL_ARG_TYPE (function_result_decl) = type;
4048 TREE_CHAIN (function_result_decl) = fnargs;
4049 fnargs = function_result_decl;
4052 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4053 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4055 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4056 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4057 #else
4058 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4059 #endif
4061 /* We haven't yet found an argument that we must push and pretend the
4062 caller did. */
4063 current_function_pretend_args_size = 0;
4065 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4067 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4068 struct args_size stack_offset;
4069 struct args_size arg_size;
4070 int passed_pointer = 0;
4071 int did_conversion = 0;
4072 tree passed_type = DECL_ARG_TYPE (parm);
4073 tree nominal_type = TREE_TYPE (parm);
4074 int pretend_named;
4076 /* Set LAST_NAMED if this is the last named arg before some
4077 anonymous args. */
4078 int last_named = ((TREE_CHAIN (parm) == 0
4079 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4080 && (stdarg || current_function_varargs));
4081 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4082 most machines, if this is a varargs/stdarg function, then we treat
4083 the last named arg as if it were anonymous too. */
4084 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4086 if (TREE_TYPE (parm) == error_mark_node
4087 /* This can happen after weird syntax errors
4088 or if an enum type is defined among the parms. */
4089 || TREE_CODE (parm) != PARM_DECL
4090 || passed_type == NULL)
4092 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4093 = gen_rtx_MEM (BLKmode, const0_rtx);
4094 TREE_USED (parm) = 1;
4095 continue;
4098 /* For varargs.h function, save info about regs and stack space
4099 used by the individual args, not including the va_alist arg. */
4100 if (hide_last_arg && last_named)
4101 current_function_args_info = args_so_far;
4103 /* Find mode of arg as it is passed, and mode of arg
4104 as it should be during execution of this function. */
4105 passed_mode = TYPE_MODE (passed_type);
4106 nominal_mode = TYPE_MODE (nominal_type);
4108 /* If the parm's mode is VOID, its value doesn't matter; avoid the
4109 usual things like emit_move_insn that could crash. */
4110 if (nominal_mode == VOIDmode)
4112 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4113 continue;
4116 /* If the parm is to be passed as a transparent union, use the
4117 type of the first field for the tests below. We have already
4118 verified that the modes are the same. */
4119 if (DECL_TRANSPARENT_UNION (parm)
4120 || TYPE_TRANSPARENT_UNION (passed_type))
4121 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4123 /* See if this arg was passed by invisible reference. It is if
4124 it is an object whose size depends on the contents of the
4125 object itself or if the machine requires these objects be passed
4126 that way. */
4128 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4129 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4130 || TREE_ADDRESSABLE (passed_type)
4131 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4132 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4133 passed_type, named_arg)
4134 #endif
4135 )
4137 passed_type = nominal_type = build_pointer_type (passed_type);
4138 passed_pointer = 1;
4139 passed_mode = nominal_mode = Pmode;
4142 promoted_mode = passed_mode;
4144 #ifdef PROMOTE_FUNCTION_ARGS
4145 /* Compute the mode to which the arg is actually extended. */
4146 unsignedp = TREE_UNSIGNED (passed_type);
4147 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4148 #endif
4150 /* Let machine desc say which reg (if any) the parm arrives in.
4151 0 means it arrives on the stack. */
4152 #ifdef FUNCTION_INCOMING_ARG
4153 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4154 passed_type, named_arg);
4155 #else
4156 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4157 passed_type, named_arg);
4158 #endif
4160 if (entry_parm == 0)
4161 promoted_mode = passed_mode;
4163 #ifdef SETUP_INCOMING_VARARGS
4164 /* If this is the last named parameter, do any required setup for
4165 varargs or stdargs. We need to know about the case of this being an
4166 addressable type, in which case we skip the registers it
4167 would have arrived in.
4169 For stdargs, LAST_NAMED will be set for two parameters, the one that
4170 is actually the last named, and the dummy parameter. We only
4171 want to do this action once.
4173 Also, indicate when RTL generation is to be suppressed. */
4174 if (last_named && !varargs_setup)
4176 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4177 current_function_pretend_args_size, 0);
4178 varargs_setup = 1;
4180 #endif
4182 /* Determine parm's home in the stack,
4183 in case it arrives in the stack or we should pretend it did.
4185 Compute the stack position and rtx where the argument arrives
4186 and its size.
4188 There is one complexity here: If this was a parameter that would
4189 have been passed in registers, but wasn't only because it is
4190 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4191 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4192 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4193 0 as it was the previous time. */
4195 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4196 locate_and_pad_parm (promoted_mode, passed_type,
4197 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4198 1,
4199 #else
4200 #ifdef FUNCTION_INCOMING_ARG
4201 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4202 passed_type,
4203 pretend_named) != 0,
4204 #else
4205 FUNCTION_ARG (args_so_far, promoted_mode,
4206 passed_type,
4207 pretend_named) != 0,
4208 #endif
4209 #endif
4210 fndecl, &stack_args_size, &stack_offset, &arg_size,
4211 &alignment_pad);
4214 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4216 if (offset_rtx == const0_rtx)
4217 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4218 else
4219 stack_parm = gen_rtx_MEM (promoted_mode,
4220 gen_rtx_PLUS (Pmode,
4221 internal_arg_pointer,
4222 offset_rtx));
4224 /* If this is a memory ref that contains aggregate components,
4225 mark it as such for cse and loop optimize. Likewise if it
4226 is readonly. */
4227 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4228 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4229 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4232 /* If this parameter was passed both in registers and in the stack,
4233 use the copy on the stack. */
4234 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4235 entry_parm = 0;
4237 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4238 /* If this parm was passed part in regs and part in memory,
4239 pretend it arrived entirely in memory
4240 by pushing the register-part onto the stack.
4242 In the special case of a DImode or DFmode that is split,
4243 we could put it together in a pseudoreg directly,
4244 but for now that's not worth bothering with. */
4246 if (entry_parm)
4248 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4249 passed_type, named_arg);
4251 if (nregs > 0)
4253 current_function_pretend_args_size
4254 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4255 / (PARM_BOUNDARY / BITS_PER_UNIT)
4256 * (PARM_BOUNDARY / BITS_PER_UNIT));
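/* Worked example (hypothetical target numbers): with NREGS == 3,
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, the register part
   occupies 12 bytes and the expression above rounds that up to the
   8-byte parameter boundary: (12 + 8 - 1) / 8 * 8 == 16. */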
4258 /* Handle calls that pass values in multiple non-contiguous
4259 locations. The Irix 6 ABI has examples of this. */
4260 if (GET_CODE (entry_parm) == PARALLEL)
4261 emit_group_store (validize_mem (stack_parm), entry_parm,
4262 int_size_in_bytes (TREE_TYPE (parm)),
4263 (TYPE_ALIGN (TREE_TYPE (parm))
4264 / BITS_PER_UNIT));
4265 else
4266 move_block_from_reg (REGNO (entry_parm),
4267 validize_mem (stack_parm), nregs,
4268 int_size_in_bytes (TREE_TYPE (parm)));
4270 entry_parm = stack_parm;
4273 #endif
4275 /* If we didn't decide this parm came in a register,
4276 by default it came on the stack. */
4277 if (entry_parm == 0)
4278 entry_parm = stack_parm;
4280 /* Record permanently how this parm was passed. */
4281 DECL_INCOMING_RTL (parm) = entry_parm;
4283 /* If there is actually space on the stack for this parm,
4284 count it in stack_args_size; otherwise set stack_parm to 0
4285 to indicate there is no preallocated stack slot for the parm. */
4287 if (entry_parm == stack_parm
4288 || (GET_CODE (entry_parm) == PARALLEL
4289 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4290 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4291 /* On some machines, even if a parm value arrives in a register
4292 there is still an (uninitialized) stack slot allocated for it.
4294 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4295 whether this parameter already has a stack slot allocated,
4296 because an arg block exists only if current_function_args_size
4297 is larger than some threshold, and we haven't calculated that
4298 yet. So, for now, we just assume that stack slots never exist
4299 in this case. */
4300 || REG_PARM_STACK_SPACE (fndecl) > 0
4301 #endif
4302 )
4304 stack_args_size.constant += arg_size.constant;
4305 if (arg_size.var)
4306 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4308 else
4309 /* No stack slot was pushed for this parm. */
4310 stack_parm = 0;
4312 /* Update info on where next arg arrives in registers. */
4314 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4315 passed_type, named_arg);
4317 /* If we can't trust the parm stack slot to be aligned enough
4318 for its ultimate type, don't use that slot after entry.
4319 We'll make another stack slot, if we need one. */
4321 int thisparm_boundary
4322 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4324 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4325 stack_parm = 0;
4328 /* If parm was passed in memory, and we need to convert it on entry,
4329 don't store it back in that same slot. */
4330 if (entry_parm != 0
4331 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4332 stack_parm = 0;
4334 #if 0
4335 /* Now adjust STACK_PARM to the mode and precise location
4336 where this parameter should live during execution,
4337 if we discover that it must live in the stack during execution.
4338 To make debuggers happier on big-endian machines, we store
4339 the value in the last bytes of the space available. */
4341 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4342 && stack_parm != 0)
4344 rtx offset_rtx;
4346 if (BYTES_BIG_ENDIAN
4347 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4348 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4349 - GET_MODE_SIZE (nominal_mode));
4351 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4352 if (offset_rtx == const0_rtx)
4353 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4354 else
4355 stack_parm = gen_rtx_MEM (nominal_mode,
4356 gen_rtx_PLUS (Pmode,
4357 internal_arg_pointer,
4358 offset_rtx));
4360 /* If this is a memory ref that contains aggregate components,
4361 mark it as such for cse and loop optimize. */
4362 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4364 #endif /* 0 */
4366 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4367 in the mode in which it arrives.
4368 STACK_PARM is an RTX for a stack slot where the parameter can live
4369 during the function (in case we want to put it there).
4370 STACK_PARM is 0 if no stack slot was pushed for it.
4372 Now output code if necessary to convert ENTRY_PARM to
4373 the type in which this function declares it,
4374 and store that result in an appropriate place,
4375 which may be a pseudo reg, may be STACK_PARM,
4376 or may be a local stack slot if STACK_PARM is 0.
4378 Set DECL_RTL to that place. */
4380 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4382 /* If a BLKmode arrives in registers, copy it to a stack slot.
4383 Handle calls that pass values in multiple non-contiguous
4384 locations. The Irix 6 ABI has examples of this. */
4385 if (GET_CODE (entry_parm) == REG
4386 || GET_CODE (entry_parm) == PARALLEL)
4388 int size_stored
4389 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4390 UNITS_PER_WORD);
4392 /* Note that we will be storing an integral number of words.
4393 So we have to be careful to ensure that we allocate an
4394 integral number of words. We do this below in the
4395 assign_stack_local if space was not allocated in the argument
4396 list. If it was, this will not work if PARM_BOUNDARY is not
4397 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4398 if it becomes a problem. */
4400 if (stack_parm == 0)
4402 stack_parm
4403 = assign_stack_local (GET_MODE (entry_parm),
4404 size_stored, 0);
4406 /* If this is a memory ref that contains aggregate
4407 components, mark it as such for cse and loop optimize. */
4408 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4411 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4412 abort ();
4414 if (TREE_READONLY (parm))
4415 RTX_UNCHANGING_P (stack_parm) = 1;
4417 /* Handle calls that pass values in multiple non-contiguous
4418 locations. The Irix 6 ABI has examples of this. */
4419 if (GET_CODE (entry_parm) == PARALLEL)
4420 emit_group_store (validize_mem (stack_parm), entry_parm,
4421 int_size_in_bytes (TREE_TYPE (parm)),
4422 (TYPE_ALIGN (TREE_TYPE (parm))
4423 / BITS_PER_UNIT));
4424 else
4425 move_block_from_reg (REGNO (entry_parm),
4426 validize_mem (stack_parm),
4427 size_stored / UNITS_PER_WORD,
4428 int_size_in_bytes (TREE_TYPE (parm)));
4430 DECL_RTL (parm) = stack_parm;
4432 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4433 && ! DECL_INLINE (fndecl))
4434 /* layout_decl may set this. */
4435 || TREE_ADDRESSABLE (parm)
4436 || TREE_SIDE_EFFECTS (parm)
4437 /* If -ffloat-store specified, don't put explicit
4438 float variables into registers. */
4439 || (flag_float_store
4440 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4441 /* Always assign pseudo to structure return or item passed
4442 by invisible reference. */
4443 || passed_pointer || parm == function_result_decl)
4445 /* Store the parm in a pseudoregister during the function, but we
4446 may need to do it in a wider mode. */
4448 register rtx parmreg;
4449 int regno, regnoi = 0, regnor = 0;
4451 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4453 promoted_nominal_mode
4454 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4456 parmreg = gen_reg_rtx (promoted_nominal_mode);
4457 mark_user_reg (parmreg);
4459 /* If this was an item that we received a pointer to, set DECL_RTL
4460 appropriately. */
4461 if (passed_pointer)
4463 DECL_RTL (parm)
4464 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4465 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4467 else
4468 DECL_RTL (parm) = parmreg;
4470 /* Copy the value into the register. */
4471 if (nominal_mode != passed_mode
4472 || promoted_nominal_mode != promoted_mode)
4474 int save_tree_used;
4475 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4476 mode, by the caller. We now have to convert it to
4477 NOMINAL_MODE, if different. However, PARMREG may be in
4478 a different mode than NOMINAL_MODE if it is being stored
4479 promoted.
4481 If ENTRY_PARM is a hard register, it might be in a register
4482 not valid for operating in its mode (e.g., an odd-numbered
4483 register for a DFmode). In that case, moves are the only
4484 thing valid, so we can't do a convert from there. This
4485 occurs when the calling sequence allows such misaligned
4486 usages.
4488 In addition, the conversion may involve a call, which could
4489 clobber parameters which haven't been copied to pseudo
4490 registers yet. Therefore, we must first copy the parm to
4491 a pseudo reg here, and save the conversion until after all
4492 parameters have been moved. */
4494 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4496 emit_move_insn (tempreg, validize_mem (entry_parm));
4498 push_to_sequence (conversion_insns);
4499 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4501 /* TREE_USED gets set erroneously during expand_assignment. */
4502 save_tree_used = TREE_USED (parm);
4503 expand_assignment (parm,
4504 make_tree (nominal_type, tempreg), 0, 0);
4505 TREE_USED (parm) = save_tree_used;
4506 conversion_insns = get_insns ();
4507 did_conversion = 1;
4508 end_sequence ();
4510 else
4511 emit_move_insn (parmreg, validize_mem (entry_parm));
4513 /* If we were passed a pointer but the actual value
4514 can safely live in a register, put it in one. */
4515 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4516 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4517 && ! DECL_INLINE (fndecl))
4518 /* layout_decl may set this. */
4519 || TREE_ADDRESSABLE (parm)
4520 || TREE_SIDE_EFFECTS (parm)
4521 /* If -ffloat-store specified, don't put explicit
4522 float variables into registers. */
4523 || (flag_float_store
4524 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4526 /* We can't use nominal_mode, because it will have been set to
4527 Pmode above. We must use the actual mode of the parm. */
4528 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4529 mark_user_reg (parmreg);
4530 emit_move_insn (parmreg, DECL_RTL (parm));
4531 DECL_RTL (parm) = parmreg;
4532 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4533 now the parm. */
4534 stack_parm = 0;
4536 #ifdef FUNCTION_ARG_CALLEE_COPIES
4537 /* If we are passed an arg by reference and it is our responsibility
4538 to make a copy, do it now.
4539 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4540 original argument, so we must recreate them in the call to
4541 FUNCTION_ARG_CALLEE_COPIES. */
4542 /* ??? Later add code to handle the case that if the argument isn't
4543 modified, don't do the copy. */
4545 else if (passed_pointer
4546 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4547 TYPE_MODE (DECL_ARG_TYPE (parm)),
4548 DECL_ARG_TYPE (parm),
4549 named_arg)
4550 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4552 rtx copy;
4553 tree type = DECL_ARG_TYPE (parm);
4555 /* This sequence may involve a library call perhaps clobbering
4556 registers that haven't been copied to pseudos yet. */
4558 push_to_sequence (conversion_insns);
4560 if (TYPE_SIZE (type) == 0
4561 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4562 /* This is a variable sized object. */
4563 copy = gen_rtx_MEM (BLKmode,
4564 allocate_dynamic_stack_space
4565 (expr_size (parm), NULL_RTX,
4566 TYPE_ALIGN (type)));
4567 else
4568 copy = assign_stack_temp (TYPE_MODE (type),
4569 int_size_in_bytes (type), 1);
4570 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4571 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4573 store_expr (parm, copy, 0);
4574 emit_move_insn (parmreg, XEXP (copy, 0));
4575 if (current_function_check_memory_usage)
4576 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4577 XEXP (copy, 0), Pmode,
4578 GEN_INT (int_size_in_bytes (type)),
4579 TYPE_MODE (sizetype),
4580 GEN_INT (MEMORY_USE_RW),
4581 TYPE_MODE (integer_type_node));
4582 conversion_insns = get_insns ();
4583 did_conversion = 1;
4584 end_sequence ();
4586 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4588 /* In any case, record the parm's desired stack location
4589 in case we later discover it must live in the stack.
4591 If it is a COMPLEX value, store the stack location for both
4592 halves. */
4594 if (GET_CODE (parmreg) == CONCAT)
4595 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4596 else
4597 regno = REGNO (parmreg);
4599 if (regno >= max_parm_reg)
4601 rtx *new;
4602 int old_max_parm_reg = max_parm_reg;
4604 /* It's slow to expand this one register at a time,
4605 but it's also rare and we need max_parm_reg to be
4606 precisely correct. */
4607 max_parm_reg = regno + 1;
4608 new = (rtx *) xrealloc (parm_reg_stack_loc,
4609 max_parm_reg * sizeof (rtx));
4610 bzero ((char *) (new + old_max_parm_reg),
4611 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4612 parm_reg_stack_loc = new;
4615 if (GET_CODE (parmreg) == CONCAT)
4617 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4619 regnor = REGNO (gen_realpart (submode, parmreg));
4620 regnoi = REGNO (gen_imagpart (submode, parmreg));
4622 if (stack_parm != 0)
4624 parm_reg_stack_loc[regnor]
4625 = gen_realpart (submode, stack_parm);
4626 parm_reg_stack_loc[regnoi]
4627 = gen_imagpart (submode, stack_parm);
4629 else
4631 parm_reg_stack_loc[regnor] = 0;
4632 parm_reg_stack_loc[regnoi] = 0;
4635 else
4636 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4638 /* Mark the register as eliminable if we did no conversion
4639 and it was copied from memory at a fixed offset,
4640 and the arg pointer was not copied to a pseudo-reg.
4641 If the arg pointer is a pseudo reg or the offset formed
4642 an invalid address, such memory-equivalences
4643 as we make here would screw up life analysis for it. */
4644 if (nominal_mode == passed_mode
4645 && ! did_conversion
4646 && stack_parm != 0
4647 && GET_CODE (stack_parm) == MEM
4648 && stack_offset.var == 0
4649 && reg_mentioned_p (virtual_incoming_args_rtx,
4650 XEXP (stack_parm, 0)))
4652 rtx linsn = get_last_insn ();
4653 rtx sinsn, set;
4655 /* Mark complex types separately. */
4656 if (GET_CODE (parmreg) == CONCAT)
4657 /* Scan backwards for the set of the real and
4658 imaginary parts. */
4659 for (sinsn = linsn; sinsn != 0;
4660 sinsn = prev_nonnote_insn (sinsn))
4662 set = single_set (sinsn);
4663 if (set != 0
4664 && SET_DEST (set) == regno_reg_rtx [regnoi])
4665 REG_NOTES (sinsn)
4666 = gen_rtx_EXPR_LIST (REG_EQUIV,
4667 parm_reg_stack_loc[regnoi],
4668 REG_NOTES (sinsn));
4669 else if (set != 0
4670 && SET_DEST (set) == regno_reg_rtx [regnor])
4671 REG_NOTES (sinsn)
4672 = gen_rtx_EXPR_LIST (REG_EQUIV,
4673 parm_reg_stack_loc[regnor],
4674 REG_NOTES (sinsn));
4676 else if ((set = single_set (linsn)) != 0
4677 && SET_DEST (set) == parmreg)
4678 REG_NOTES (linsn)
4679 = gen_rtx_EXPR_LIST (REG_EQUIV,
4680 stack_parm, REG_NOTES (linsn));
4683 /* For pointer data type, suggest pointer register. */
4684 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4685 mark_reg_pointer (parmreg,
4686 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4687 / BITS_PER_UNIT));
4689 else
4691 /* Value must be stored in the stack slot STACK_PARM
4692 during function execution. */
4694 if (promoted_mode != nominal_mode)
4696 /* Conversion is required. */
4697 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4699 emit_move_insn (tempreg, validize_mem (entry_parm));
4701 push_to_sequence (conversion_insns);
4702 entry_parm = convert_to_mode (nominal_mode, tempreg,
4703 TREE_UNSIGNED (TREE_TYPE (parm)));
4704 if (stack_parm)
4706 /* ??? This may need a big-endian conversion on sparc64. */
4707 stack_parm = change_address (stack_parm, nominal_mode,
4708 NULL_RTX);
4710 conversion_insns = get_insns ();
4711 did_conversion = 1;
4712 end_sequence ();
4715 if (entry_parm != stack_parm)
4717 if (stack_parm == 0)
4719 stack_parm
4720 = assign_stack_local (GET_MODE (entry_parm),
4721 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4722 /* If this is a memory ref that contains aggregate components,
4723 mark it as such for cse and loop optimize. */
4724 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4727 if (promoted_mode != nominal_mode)
4729 push_to_sequence (conversion_insns);
4730 emit_move_insn (validize_mem (stack_parm),
4731 validize_mem (entry_parm));
4732 conversion_insns = get_insns ();
4733 end_sequence ();
4735 else
4736 emit_move_insn (validize_mem (stack_parm),
4737 validize_mem (entry_parm));
4739 if (current_function_check_memory_usage)
4741 push_to_sequence (conversion_insns);
4742 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4743 XEXP (stack_parm, 0), Pmode,
4744 GEN_INT (GET_MODE_SIZE (GET_MODE
4745 (entry_parm))),
4746 TYPE_MODE (sizetype),
4747 GEN_INT (MEMORY_USE_RW),
4748 TYPE_MODE (integer_type_node));
4750 conversion_insns = get_insns ();
4751 end_sequence ();
4753 DECL_RTL (parm) = stack_parm;
4756 /* If this "parameter" was the place where we are receiving the
4757 function's incoming structure pointer, set up the result. */
4758 if (parm == function_result_decl)
4760 tree result = DECL_RESULT (fndecl);
4761 tree restype = TREE_TYPE (result);
4763 DECL_RTL (result)
4764 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4766 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4767 AGGREGATE_TYPE_P (restype));
4770 if (TREE_THIS_VOLATILE (parm))
4771 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4772 if (TREE_READONLY (parm))
4773 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4776 /* Output all parameter conversion instructions (possibly including calls)
4777 now that all parameters have been copied out of hard registers. */
4778 emit_insns (conversion_insns);
4780 last_parm_insn = get_last_insn ();
4782 current_function_args_size = stack_args_size.constant;
4784 /* Adjust function incoming argument size for alignment and
4785 minimum length. */
4787 #ifdef REG_PARM_STACK_SPACE
4788 #ifndef MAYBE_REG_PARM_STACK_SPACE
4789 current_function_args_size = MAX (current_function_args_size,
4790 REG_PARM_STACK_SPACE (fndecl));
4791 #endif
4792 #endif
4794 #ifdef STACK_BOUNDARY
4795 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4797 current_function_args_size
4798 = ((current_function_args_size + STACK_BYTES - 1)
4799 / STACK_BYTES) * STACK_BYTES;
4800 #endif
4802 #ifdef ARGS_GROW_DOWNWARD
4803 current_function_arg_offset_rtx
4804 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4805 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4806 size_int (-stack_args_size.constant)),
4807 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4808 #else
4809 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4810 #endif
4812 /* See how many bytes, if any, of its args a function should try to pop
4813 on return. */
4815 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4816 current_function_args_size);
4818 /* For stdarg.h function, save info about
4819 regs and stack space used by the named args. */
4821 if (!hide_last_arg)
4822 current_function_args_info = args_so_far;
4824 /* Set the rtx used for the function return value. Put this in its
4825 own variable so any optimizers that need this information don't have
4826 to include tree.h. Do this here so it gets done when an inlined
4827 function gets output. */
4829 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4832 /* Indicate whether REGNO is an incoming argument to the current function
4833 that was promoted to a wider mode. If so, return the RTX for the
4834 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4835 that REGNO is promoted from and whether the promotion was signed or
4836 unsigned. */
4838 #ifdef PROMOTE_FUNCTION_ARGS
4840 rtx
4841 promoted_input_arg (regno, pmode, punsignedp)
4842 int regno;
4843 enum machine_mode *pmode;
4844 int *punsignedp;
4846 tree arg;
4848 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4849 arg = TREE_CHAIN (arg))
4850 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4851 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4852 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4854 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4855 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4857 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4858 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4859 && mode != DECL_MODE (arg))
4861 *pmode = DECL_MODE (arg);
4862 *punsignedp = unsignedp;
4863 return DECL_INCOMING_RTL (arg);
4867 return 0;
4870 #endif
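/* Illustrative example (hypothetical target): if a `short' argument
   arrives promoted to SImode because of PROMOTE_FUNCTION_ARGS,
   promoted_input_arg for that register returns the incoming REG,
   sets *PMODE to HImode (the declared mode) and *PUNSIGNEDP from the
   signedness of the type; a consumer can then assume the caller
   already extended the upper bits. */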
4872 /* Compute the size and offset from the start of the stacked arguments for a
4873 parm passed in mode PASSED_MODE and with type TYPE.
4875 INITIAL_OFFSET_PTR points to the current offset into the stacked
4876 arguments.
4878 The starting offset and size for this parm are returned in *OFFSET_PTR
4879 and *ARG_SIZE_PTR, respectively.
4881 IN_REGS is non-zero if the argument will be passed in registers. It will
4882 never be set if REG_PARM_STACK_SPACE is not defined.
4884 FNDECL is the function in which the argument was defined.
4886 There are two types of rounding that are done. The first, controlled by
4887 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4888 list to be aligned to the specific boundary (in bits). This rounding
4889 affects the initial and starting offsets, but not the argument size.
4891 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4892 optionally rounds the size of the parm to PARM_BOUNDARY. The
4893 initial offset is not affected by this rounding, while the size always
4894 is and the starting offset may be. */
4896 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
4897 initial_offset_ptr is positive because locate_and_pad_parm's
4898 callers pass in the total size of args so far as
4899 initial_offset_ptr. arg_size_ptr is always positive. */
4901 void
4902 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4903 initial_offset_ptr, offset_ptr, arg_size_ptr,
4904 alignment_pad)
4905 enum machine_mode passed_mode;
4906 tree type;
4907 int in_regs;
4908 tree fndecl ATTRIBUTE_UNUSED;
4909 struct args_size *initial_offset_ptr;
4910 struct args_size *offset_ptr;
4911 struct args_size *arg_size_ptr;
4912 struct args_size *alignment_pad;
4915 tree sizetree
4916 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4917 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4918 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4920 #ifdef REG_PARM_STACK_SPACE
4921 /* If we have found a stack parm before we reach the end of the
4922 area reserved for registers, skip that area. */
4923 if (! in_regs)
4925 int reg_parm_stack_space = 0;
4927 #ifdef MAYBE_REG_PARM_STACK_SPACE
4928 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4929 #else
4930 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4931 #endif
4932 if (reg_parm_stack_space > 0)
4934 if (initial_offset_ptr->var)
4936 initial_offset_ptr->var
4937 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4938 size_int (reg_parm_stack_space));
4939 initial_offset_ptr->constant = 0;
4941 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4942 initial_offset_ptr->constant = reg_parm_stack_space;
4945 #endif /* REG_PARM_STACK_SPACE */
4947 arg_size_ptr->var = 0;
4948 arg_size_ptr->constant = 0;
4950 #ifdef ARGS_GROW_DOWNWARD
4951 if (initial_offset_ptr->var)
4953 offset_ptr->constant = 0;
4954 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4955 initial_offset_ptr->var);
4957 else
4959 offset_ptr->constant = - initial_offset_ptr->constant;
4960 offset_ptr->var = 0;
4962 if (where_pad != none
4963 && (TREE_CODE (sizetree) != INTEGER_CST
4964 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4965 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4966 SUB_PARM_SIZE (*offset_ptr, sizetree);
4967 if (where_pad != downward)
4968 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
4969 if (initial_offset_ptr->var)
4971 arg_size_ptr->var = size_binop (MINUS_EXPR,
4972 size_binop (MINUS_EXPR,
4973 integer_zero_node,
4974 initial_offset_ptr->var),
4975 offset_ptr->var);
4977 else
4979 arg_size_ptr->constant = (- initial_offset_ptr->constant
4980 - offset_ptr->constant);
4982 #else /* !ARGS_GROW_DOWNWARD */
4983 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
4984 *offset_ptr = *initial_offset_ptr;
4986 #ifdef PUSH_ROUNDING
4987 if (passed_mode != BLKmode)
4988 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4989 #endif
4991 /* Pad_below needs the pre-rounded size to know how much to pad below
4992 so this must be done before rounding up. */
4993 if (where_pad == downward
4994 /* However, BLKmode args passed in regs have their padding done elsewhere.
4995 The stack slot must be able to hold the entire register. */
4996 && !(in_regs && passed_mode == BLKmode))
4997 pad_below (offset_ptr, passed_mode, sizetree);
4999 if (where_pad != none
5000 && (TREE_CODE (sizetree) != INTEGER_CST
5001 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5002 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5004 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5005 #endif /* ARGS_GROW_DOWNWARD */
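/* Worked example for the upward-growing case (all numbers
   hypothetical): with *INITIAL_OFFSET_PTR at constant 20, a
   FUNCTION_ARG_BOUNDARY of 64 bits, and a 4-byte SImode parm under a
   PARM_BOUNDARY of 32 bits, pad_to_arg_alignment first rounds the
   offset up to 24, the parm is placed at offset 24, and *ARG_SIZE_PTR
   becomes 4, so the next parm starts at 28 before its own alignment
   is applied. */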
5008 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5009 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5011 static void
5012 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5013 struct args_size *offset_ptr;
5014 int boundary;
5015 struct args_size *alignment_pad;
5017 tree save_var;
5018 HOST_WIDE_INT save_constant;
5020 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5022 if (boundary > PARM_BOUNDARY)
5024 save_var = offset_ptr->var;
5025 save_constant = offset_ptr->constant;
5028 alignment_pad->var = NULL_TREE;
5029 alignment_pad->constant = 0;
5031 if (boundary > BITS_PER_UNIT)
5033 if (offset_ptr->var)
5035 offset_ptr->var =
5036 #ifdef ARGS_GROW_DOWNWARD
5037 round_down
5038 #else
5039 round_up
5040 #endif
5041 (ARGS_SIZE_TREE (*offset_ptr),
5042 boundary / BITS_PER_UNIT);
5043 offset_ptr->constant = 0; /*?*/
5044 if (boundary > PARM_BOUNDARY)
5045 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
5047 else
5049 offset_ptr->constant =
5050 #ifdef ARGS_GROW_DOWNWARD
5051 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5052 #else
5053 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5054 #endif
5055 if (boundary > PARM_BOUNDARY)
5056 alignment_pad->constant = offset_ptr->constant - save_constant;
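/* Worked example (hypothetical numbers): with BOUNDARY == 128 bits
   (16 bytes) and offset_ptr->constant == 20, the non-downward case
   computes CEIL_ROUND (20, 16) == 32; if 128 exceeds PARM_BOUNDARY,
   alignment_pad->constant records the 12 bytes of padding inserted. */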
5061 #ifndef ARGS_GROW_DOWNWARD
5062 static void
5063 pad_below (offset_ptr, passed_mode, sizetree)
5064 struct args_size *offset_ptr;
5065 enum machine_mode passed_mode;
5066 tree sizetree;
5068 if (passed_mode != BLKmode)
5070 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5071 offset_ptr->constant
5072 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5073 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5074 - GET_MODE_SIZE (passed_mode));
5076 else
5078 if (TREE_CODE (sizetree) != INTEGER_CST
5079 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5081 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5082 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5083 /* Add it in. */
5084 ADD_PARM_SIZE (*offset_ptr, s2);
5085 SUB_PARM_SIZE (*offset_ptr, sizetree);
5089 #endif
5091 #ifdef ARGS_GROW_DOWNWARD
5092 static tree
5093 round_down (value, divisor)
5094 tree value;
5095 int divisor;
5097 return size_binop (MULT_EXPR,
5098 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5099 size_int (divisor));
5101 #endif
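/* For instance (illustrative): round_down applied to 23 with divisor 8
   builds the tree (23 floor/ 8) * 8, i.e. 16. */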
5103 /* Walk the tree of blocks describing the binding levels within a function
5104 and warn about uninitialized variables.
5105 This is done after calling flow_analysis and before global_alloc
5106 clobbers the pseudo-regs to hard regs. */
5108 void
5109 uninitialized_vars_warning (block)
5110 tree block;
5112 register tree decl, sub;
5113 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5115 if (warn_uninitialized
5116 && TREE_CODE (decl) == VAR_DECL
5117 /* These warnings are unreliable for aggregates
5118 because assigning the fields one by one can fail to convince
5119 flow.c that the entire aggregate was initialized.
5120 Unions are troublesome because members may be shorter. */
5121 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5122 && DECL_RTL (decl) != 0
5123 && GET_CODE (DECL_RTL (decl)) == REG
5124 /* Global optimizations can make it difficult to determine if a
5125 particular variable has been initialized. However, a VAR_DECL
5126 with a nonzero DECL_INITIAL had an initializer, so do not
5127 claim it is potentially uninitialized.
5129 We do not care about the actual value in DECL_INITIAL, so we do
5130 not worry that it may be a dangling pointer. */
5131 && DECL_INITIAL (decl) == NULL_TREE
5132 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5133 warning_with_decl (decl,
5134 "`%s' might be used uninitialized in this function");
5135 if (extra_warnings
5136 && TREE_CODE (decl) == VAR_DECL
5137 && DECL_RTL (decl) != 0
5138 && GET_CODE (DECL_RTL (decl)) == REG
5139 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5140 warning_with_decl (decl,
5141 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5143 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5144 uninitialized_vars_warning (sub);
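/* An illustrative function (hypothetical, not part of this file) whose
   variable X would typically provoke the warning above: X is a scalar
   VAR_DECL kept in a register, has no DECL_INITIAL, and flow analysis
   cannot prove it is set on every path. */
#if 0
int
example (flag)
     int flag;
{
  int x;
  if (flag)
    x = 1;
  return x;  /* `x' might be used uninitialized in this function. */
}
#endif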
5147 /* Do the appropriate part of uninitialized_vars_warning
5148 but for arguments instead of local variables. */
5150 void
5151 setjmp_args_warning ()
5153 register tree decl;
5154 for (decl = DECL_ARGUMENTS (current_function_decl);
5155 decl; decl = TREE_CHAIN (decl))
5156 if (DECL_RTL (decl) != 0
5157 && GET_CODE (DECL_RTL (decl)) == REG
5158 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5159 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5162 /* If this function calls setjmp, put all vars into the stack
5163 unless they were declared `register'. */
5165 void
5166 setjmp_protect (block)
5167 tree block;
5169 register tree decl, sub;
5170 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5171 if ((TREE_CODE (decl) == VAR_DECL
5172 || TREE_CODE (decl) == PARM_DECL)
5173 && DECL_RTL (decl) != 0
5174 && (GET_CODE (DECL_RTL (decl)) == REG
5175 || (GET_CODE (DECL_RTL (decl)) == MEM
5176 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5177 /* If this variable came from an inline function, it must be
5178 that its life doesn't overlap the setjmp. If there was a
5179 setjmp in the function, it would already be in memory. We
5180 must exclude such variables because their DECL_RTL might be
5181 set to strange things such as virtual_stack_vars_rtx. */
5182 && ! DECL_FROM_INLINE (decl)
5183 && (
5184 #ifdef NON_SAVING_SETJMP
5185 /* If longjmp doesn't restore the registers,
5186 don't put anything in them. */
5187 NON_SAVING_SETJMP
5188 ||
5189 #endif
5190 ! DECL_REGISTER (decl)))
5191 put_var_into_stack (decl);
5192 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5193 setjmp_protect (sub);
5196 /* Like the previous function, but for args instead of local variables. */
5198 void
5199 setjmp_protect_args ()
5201 register tree decl;
5202 for (decl = DECL_ARGUMENTS (current_function_decl);
5203 decl; decl = TREE_CHAIN (decl))
5204 if ((TREE_CODE (decl) == VAR_DECL
5205 || TREE_CODE (decl) == PARM_DECL)
5206 && DECL_RTL (decl) != 0
5207 && (GET_CODE (DECL_RTL (decl)) == REG
5208 || (GET_CODE (DECL_RTL (decl)) == MEM
5209 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5210 && (
5211 /* If longjmp doesn't restore the registers,
5212 don't put anything in them. */
5213 #ifdef NON_SAVING_SETJMP
5214 NON_SAVING_SETJMP
5215 ||
5216 #endif
5217 ! DECL_REGISTER (decl)))
5218 put_var_into_stack (decl);
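/* Source-level illustration (hypothetical example; `do_longjmp' is an
   assumed function that calls longjmp): in code like the following,
   `v' is forced into a stack slot unless it was declared `register',
   since after a longjmp the caller-restored registers may no longer
   hold its value. */
#if 0
#include <setjmp.h>
extern void do_longjmp ();
jmp_buf buf;
int
example ()
{
  int v = 0;
  if (setjmp (buf))
    return v;  /* `v' must still be readable here. */
  v = 1;
  do_longjmp (buf);
  return 0;
}
#endif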
5221 /* Return the context-pointer register corresponding to DECL,
5222 or 0 if it does not need one. */
5224 rtx
5225 lookup_static_chain (decl)
5226 tree decl;
5228 tree context = decl_function_context (decl);
5229 tree link;
5231 if (context == 0
5232 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5233 return 0;
5235 /* We treat inline_function_decl as an alias for the current function
5236 because that is the inline function whose vars, types, etc.
5237 are being merged into the current function.
5238 See expand_inline_function. */
5239 if (context == current_function_decl || context == inline_function_decl)
5240 return virtual_stack_vars_rtx;
5242 for (link = context_display; link; link = TREE_CHAIN (link))
5243 if (TREE_PURPOSE (link) == context)
5244 return RTL_EXPR_RTL (TREE_VALUE (link));
5246 abort ();
5249 /* Convert a stack slot address ADDR for variable VAR
5250 (from a containing function)
5251 into an address valid in this function (using a static chain). */
5253 rtx
5254 fix_lexical_addr (addr, var)
5255 rtx addr;
5256 tree var;
5258 rtx basereg;
5259 HOST_WIDE_INT displacement;
5260 tree context = decl_function_context (var);
5261 struct function *fp;
5262 rtx base = 0;
5264 /* If this is the present function, we need not do anything. */
5265 if (context == current_function_decl || context == inline_function_decl)
5266 return addr;
5268 for (fp = outer_function_chain; fp; fp = fp->next)
5269 if (fp->decl == context)
5270 break;
5272 if (fp == 0)
5273 abort ();
5275 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5276 addr = XEXP (XEXP (addr, 0), 0);
5278 /* Decode given address as base reg plus displacement. */
5279 if (GET_CODE (addr) == REG)
5280 basereg = addr, displacement = 0;
5281 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5282 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5283 else
5284 abort ();
5286 /* We accept vars reached via the containing function's
5287 incoming arg pointer and via its stack variables pointer. */
5288 if (basereg == fp->internal_arg_pointer)
5290 /* If reached via arg pointer, get the arg pointer value
5291 out of that function's stack frame.
5293 There are two cases: If a separate ap is needed, allocate a
5294 slot in the outer function for it and dereference it that way.
5295 This is correct even if the real ap is actually a pseudo.
5296 Otherwise, just adjust the offset from the frame pointer to
5297 compensate. */
5299 #ifdef NEED_SEPARATE_AP
5300 rtx addr;
5302 if (fp->x_arg_pointer_save_area == 0)
5303 fp->x_arg_pointer_save_area
5304 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5306 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5307 addr = memory_address (Pmode, addr);
5309 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5310 #else
5311 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5312 base = lookup_static_chain (var);
5313 #endif
5316 else if (basereg == virtual_stack_vars_rtx)
5318 /* This is the same code as lookup_static_chain, duplicated here to
5319 avoid an extra call to decl_function_context. */
5320 tree link;
5322 for (link = context_display; link; link = TREE_CHAIN (link))
5323 if (TREE_PURPOSE (link) == context)
5325 base = RTL_EXPR_RTL (TREE_VALUE (link));
5326 break;
5330 if (base == 0)
5331 abort ();
5333 /* Use same offset, relative to appropriate static chain or argument
5334 pointer. */
5335 return plus_constant (base, displacement);
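/* Illustrative example (hypothetical offsets): if a variable of the
   containing function lives at (plus (reg virtual-stack-vars)
   (const_int -20)) in that function's frame, the code above decodes
   BASEREG and DISPLACEMENT as that register and -20, looks up the
   static chain pointer for the outer frame, and returns the chain
   register plus -20, a valid address in the nested function. */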
5338 /* Return the address of the trampoline for entering nested fn FUNCTION.
5339 If necessary, allocate a trampoline (in the stack frame)
5340 and emit rtl to initialize its contents (at entry to this function). */
5342 rtx
5343 trampoline_address (function)
5344 tree function;
5346 tree link;
5347 tree rtlexp;
5348 rtx tramp;
5349 struct function *fp;
5350 tree fn_context;
5352 /* Find an existing trampoline and return it. */
5353 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5354 if (TREE_PURPOSE (link) == function)
5355 return
5356 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5358 for (fp = outer_function_chain; fp; fp = fp->next)
5359 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5360 if (TREE_PURPOSE (link) == function)
5362 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5363 function);
5364 return round_trampoline_addr (tramp);
5367 /* None exists; we must make one. */
5369 /* Find the `struct function' for the function containing FUNCTION. */
5370 fp = 0;
5371 fn_context = decl_function_context (function);
5372 if (fn_context != current_function_decl
5373 && fn_context != inline_function_decl)
5374 for (fp = outer_function_chain; fp; fp = fp->next)
5375 if (fp->decl == fn_context)
5376 break;
5378 /* Allocate run-time space for this trampoline
5379 (usually in the defining function's stack frame). */
5380 #ifdef ALLOCATE_TRAMPOLINE
5381 tramp = ALLOCATE_TRAMPOLINE (fp);
5382 #else
5383 /* If rounding needed, allocate extra space
5384 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5385 #ifdef TRAMPOLINE_ALIGNMENT
5386 #define TRAMPOLINE_REAL_SIZE \
5387 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5388 #else
5389 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5390 #endif
5391 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5392 fp ? fp : current_function);
5393 #endif
5395 /* Record the trampoline for reuse and note it for later initialization
5396 by expand_function_end. */
5397 if (fp != 0)
5399 push_obstacks (fp->function_maybepermanent_obstack,
5400 fp->function_maybepermanent_obstack);
5401 rtlexp = make_node (RTL_EXPR);
5402 RTL_EXPR_RTL (rtlexp) = tramp;
5403 fp->x_trampoline_list = tree_cons (function, rtlexp,
5404 fp->x_trampoline_list);
5405 pop_obstacks ();
5407 else
5409 /* Make the RTL_EXPR node temporary, not momentary, so that the
5410 trampoline_list doesn't become garbage. */
5411 int momentary = suspend_momentary ();
5412 rtlexp = make_node (RTL_EXPR);
5413 resume_momentary (momentary);
5415 RTL_EXPR_RTL (rtlexp) = tramp;
5416 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5419 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5420 return round_trampoline_addr (tramp);

/* Given a trampoline address,
   round it to multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
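
/* Worked example of the rounding above, assuming for illustration that
   TRAMPOLINE_ALIGNMENT is 128 bits (a 16-byte boundary): for an address
   0x1003 we compute (0x1003 + 15) & -16 == 0x1010, the next multiple of
   16.  The same computation on plain integers:  */
#if 0
unsigned long
round_trampoline_addr_sketch (unsigned long addr)
{
  unsigned long align = 128 / 8;	/* TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT */
  return (addr + align - 1) & -align;	/* mirrors the add/and insns above */
}
#endif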

/* Insert the BLOCK in the block-tree before LAST_INSN.  */

void
retrofit_block (block, last_insn)
     tree block;
     rtx last_insn;
{
  rtx insn;

  /* Now insert the new BLOCK at the right place in the block trees
     for the function which called the inline function.  We just look
     backwards for a NOTE_INSN_BLOCK_{BEG,END}.  If we find the
     beginning of a block, then this new block becomes the first
     subblock of that block.  If we find the end of a block, then this
     new block follows that block in the list of blocks.  */
  for (insn = last_insn; insn; insn = PREV_INSN (insn))
    if (GET_CODE (insn) == NOTE
	&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	    || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
      break;
  if (!insn || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
    {
      tree superblock;

      if (insn)
	superblock = NOTE_BLOCK (insn);
      else
	superblock = DECL_INITIAL (current_function_decl);

      BLOCK_SUPERCONTEXT (block) = superblock;
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (superblock);
      BLOCK_SUBBLOCKS (superblock) = block;
    }
  else
    {
      tree prevblock = NOTE_BLOCK (insn);

      BLOCK_SUPERCONTEXT (block) = BLOCK_SUPERCONTEXT (prevblock);
      BLOCK_CHAIN (block) = BLOCK_CHAIN (prevblock);
      BLOCK_CHAIN (prevblock) = block;
    }
}

/* The functions identify_blocks and reorder_blocks provide a way to
   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
   duplicate portions of the RTL code.  Call identify_blocks before
   changing the RTL, and call reorder_blocks after.  */

/* Put all this function's BLOCK nodes including those that are chained
   onto the first block into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are BLOCK, the chain of top-level blocks of the function,
   and INSNS, the insn chain of the function.  */

void
identify_blocks (block, insns)
     tree block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  tree *block_stack;
  int depth = 0;
  int current_block_number = 1;
  rtx insn;

  if (block == 0)
    return;

  /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
     depth-first order.  */
  n_blocks = all_blocks (block, 0);
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  all_blocks (block, block_vector);

  block_stack = (tree *) alloca (n_blocks * sizeof (tree));

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree b;

	    /* If there are more block notes than BLOCKs, something
	       is badly wrong.  */
	    if (current_block_number == n_blocks)
	      abort ();

	    b = block_vector[current_block_number++];
	    NOTE_BLOCK (insn) = b;
	    block_stack[depth++] = b;
	  }
	else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    if (depth == 0)
	      /* There are more NOTE_INSN_BLOCK_ENDs than
		 NOTE_INSN_BLOCK_BEGs.  Something is badly wrong.  */
	      abort ();

	    NOTE_BLOCK (insn) = block_stack[--depth];
	  }
      }

  /* In whole-function mode, we might not have seen the whole function
     yet, so we might not use up all the blocks.  */
  if (n_blocks != current_block_number
      && !current_function->x_whole_function_mode_p)
    abort ();

  free (block_vector);
}
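
/* Illustration of the pairing established above.  For RTL of the shape

     NOTE_INSN_BLOCK_BEG      <- block_vector[1]
       NOTE_INSN_BLOCK_BEG    <- block_vector[2]
       NOTE_INSN_BLOCK_END    <- block_vector[2] (popped)
     NOTE_INSN_BLOCK_END      <- block_vector[1] (popped)

   each BEG note takes the next vector entry and is pushed on the stack;
   each END note pops, so matching notes share one BLOCK.  Entry 0 is the
   outermost block passed in as BLOCK.  */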

/* Given a revised instruction chain, rebuild the tree structure of
   BLOCK nodes to correspond to the new order of RTL.  The new block
   tree is inserted below BLOCK.  Returns the current top-level
   block.  */

tree
reorder_blocks (block, insns)
     tree block;
     rtx insns;
{
  tree current_block = block;
  rtx insn;

  if (block == NULL_TREE)
    return NULL_TREE;

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;
  BLOCK_CHAIN (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = NOTE_BLOCK (insn);
	    /* If we have seen this block before, copy it.  */
	    if (TREE_ASM_WRITTEN (block))
	      block = copy_node (block);
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
      }

  BLOCK_SUBBLOCKS (current_block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));

  return current_block;
}
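
/* Hedged usage sketch for the identify_blocks/reorder_blocks pair: a
   pass that reshuffles or duplicates insns would bracket its work
   roughly like this (FNDECL is an illustrative name):  */
#if 0
  identify_blocks (DECL_INITIAL (fndecl), get_insns ());
  /* ... transform the insn chain, preserving the block notes ... */
  DECL_INITIAL (fndecl) = reorder_blocks (DECL_INITIAL (fndecl),
					  get_insns ());
#endif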

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
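
/* For example, a chain A -> B -> C linked by BLOCK_CHAIN comes back as
   C -> B -> A.  reorder_blocks relies on this to undo the reversed
   order in which it pushes subblocks onto BLOCK_SUBBLOCKS.  */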

/* Count the subblocks of the list starting with BLOCK, and list them
   all into the vector VECTOR.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
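
/* Illustration: for a block tree in which TOP has subblocks A and B
   (chained via BLOCK_CHAIN) and A has subblock A1, the calls

     n = all_blocks (top, 0);		-- returns 4
     all_blocks (top, vec);		-- fills { TOP, A, A1, B }

   record each block before its own subblocks, the depth-first order
   that identify_blocks consumes.  */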

/* Allocate a function structure and reset its contents to the defaults.  */

static void
prepare_function_start ()
{
  current_function = (struct function *) xcalloc (1, sizeof (struct function));

  init_stmt_for_function ();
  init_eh_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  current_function_has_nonlocal_label = 0;
  current_function_has_nonlocal_goto = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slots = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;
  nonlocal_goto_handler_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_varasm_status (current_function);

  /* Clear out data used for inlining.  */
  current_function->inlinable = 0;
  current_function->original_decl_initial = 0;
  current_function->original_arg_vector = 0;

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_contains_functions = 0;
  current_function_is_leaf = 0;
  current_function_sp_is_unchanging = 0;
  current_function_uses_only_leaf_regs = 0;
  current_function_has_computed_jump = 0;
  current_function_is_thunk = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;
  current_function_cannot_inline = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* Set up to allocate temporaries.  */
  init_temp_slots ();

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs or stdarg.  */
  current_function_varargs = 0;
  current_function_stdarg = 0;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  if (init_lang_status)
    (*init_lang_status) (current_function);
  if (init_machine_status)
    (*init_machine_status) (current_function);
}

/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  */
void
init_dummy_function_start ()
{
  prepare_function_start ();
}

/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  prepare_function_start ();

  /* Remember this function for later.  */
  current_function->next_global = all_functions;
  all_functions = current_function;

  current_function_name = (*decl_printable_name) (subr, 2);
  current_function->decl = subr;

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file.  */
  if (line > 0)
    emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
}

/* Make sure all values used by the optimization passes have sane
   defaults.  */
void
init_function_for_compilation ()
{
  reg_renumber = 0;
  /* No prologue/epilogue insns yet.  */
  prologue = epilogue = 0;
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}

/* Expand a call to __main at the beginning of a possible main function.  */

#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#if !defined (HAS_INIT_SECTION)
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
		     VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
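
/* Hedged sketch of the effect on targets without an init section:
   main is compiled roughly as if it began with an explicit call to the
   symbol named by NAME__MAIN, which runs global constructors:  */
#if 0
int
main (int argc, char **argv)
{
  __main ();	/* emitted by expand_main_function above */
  /* ... user code ... */
  return 0;
}
#endif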

extern struct obstack permanent_obstack;

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr = NULL_RTX;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Set this before generating any memory accesses.  */
  current_function_check_memory_usage
    = (flag_check_memory_usage
       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));

  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_instrument_entry_exit
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
			       AGGREGATE_TYPE_P (TREE_TYPE
						 (DECL_RESULT
						  (subr))));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups || current_function_instrument_entry_exit)
    {
      /* If function will end with cleanup code for parms,
	 compute the return values into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
					       memory_address (Pmode,
							       last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
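
/* Hedged illustration of the aggregate_value_p branch above: a function
   returning a large structure is set up roughly as if it took a hidden
   pointer to the return slot (the lowered form and all names are
   illustrative):  */
#if 0
struct big { int a[16]; };

struct big f (void);		/* source-level signature */
void f_lowered (struct big *__ret);	/* effective lowered signature;
					   DECL_RTL of DECL_RESULT becomes
					   a MEM at the incoming address */
#endif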

/* Undo the effects of init_dummy_function_start.  */
void
expand_dummy_function_end ()
{
  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (current_function);
  free_after_compilation (current_function);
  free (current_function);
  current_function = 0;
}

/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  finish_expr_for_function ();

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
		      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();

	  ggc_add_rtx_root (&initial_trampoline, 1);
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
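
/* Example of the map's shape: for a SEQUENCE of three insns with UIDs
   40, 41 and 42, record_insns returns the zero-terminated vector
   { 40, 41, 42, 0 }; contains () below counts matches against it.  */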

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}

int
prologue_epilogue_contains (insn)
     rtx insn;
{
  if (prologue && contains (insn, prologue))
    return 1;
  if (epilogue && contains (insn, epilogue))
    return 1;
  return 0;
}

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;

#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);

      emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, and perhaps in an empty function,
	 the entry block will have no successors.  */
      if (ENTRY_BLOCK_PTR->succ)
	{
	  /* Can't deal with multiple successors of the entry block.  */
	  if (ENTRY_BLOCK_PTR->succ->succ_next)
	    abort ();

	  insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
	  inserted = 1;
	}
      else
	emit_insn_after (seq, f);
    }
#endif

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
	 but via mark_regs_live_at_end, we could use insert_insn_on_edge
	 and all of this ugliness would go away.  */

      switch (optimize)
	{
	default:
	  /* If the exit block has no non-fake predecessors, we don't
	     need an epilogue.  Furthermore, only pay attention to the
	     fallthru predecessors; if (conditional) return insns were
	     generated, by definition we do not need to emit epilogue
	     insns.  */

	  for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
	    if ((e->flags & EDGE_FAKE) == 0
		&& (e->flags & EDGE_FALLTHRU) != 0)
	      break;
	  if (e == NULL)
	    break;

	  /* We can't handle multiple epilogues -- if one is needed,
	     we won't be able to place it multiple times.

	     ??? Fix epilogue expanders to not assume they are the
	     last thing done compiling the function.  Either that
	     or copy_rtx each insn.

	     ??? Blah, it's not a simple expression to assert that
	     we have exactly one fallthru exit edge.  */

	  bb = e->src;
	  tail = bb->end;

	  /* ??? If the last insn of the basic block is a jump, then we
	     are creating a new basic block.  Wimp out and leave these
	     insns outside any block.  */
	  if (GET_CODE (tail) == JUMP_INSN)
	    bb = 0;

	  /* FALLTHRU */
	case 0:
	  {
	    rtx prev, seq, first_use;

	    /* Move the USE insns at the end of a function onto a list.  */
	    prev = tail;
	    if (GET_CODE (prev) == BARRIER
		|| GET_CODE (prev) == NOTE)
	      prev = prev_nonnote_insn (prev);

	    first_use = 0;
	    if (prev
		&& GET_CODE (prev) == INSN
		&& GET_CODE (PATTERN (prev)) == USE)
	      {
		/* If the end of the block is the use, grab hold of something
		   else so that we emit barriers etc in the right place.  */
		if (prev == tail)
		  {
		    do
		      tail = PREV_INSN (tail);
		    while (GET_CODE (tail) == INSN
			   && GET_CODE (PATTERN (tail)) == USE);
		  }

		do
		  {
		    rtx use = prev;
		    prev = prev_nonnote_insn (prev);

		    remove_insn (use);
		    if (first_use)
		      {
			NEXT_INSN (use) = first_use;
			PREV_INSN (first_use) = use;
		      }
		    else
		      NEXT_INSN (use) = NULL_RTX;
		    first_use = use;
		  }
		while (prev
		       && GET_CODE (prev) == INSN
		       && GET_CODE (PATTERN (prev)) == USE);
	      }

	    /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	       epilogue insns, the USE insns at the end of a function,
	       the jump insn that returns, and then a BARRIER.  */

	    if (GET_CODE (tail) != BARRIER)
	      {
		prev = next_nonnote_insn (tail);
		if (!prev || GET_CODE (prev) != BARRIER)
		  emit_barrier_after (tail);
	      }

	    seq = gen_epilogue ();
	    prev = tail;
	    tail = emit_jump_insn_after (seq, tail);

	    /* Insert the USE insns immediately before the return insn, which
	       must be the last instruction emitted in the sequence.  */
	    if (first_use)
	      emit_insns_before (first_use, tail);
	    emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

	    /* Update the tail of the basic block.  */
	    if (bb)
	      bb->end = tail;

	    /* Retain a map of the epilogue insns.  */
	    epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  }
	}
    }
#endif

  if (inserted)
    commit_edge_insertions ();
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks-1) == insn)
		    BLOCK_HEAD (n_basic_blocks-1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Mark T for GC.  */

static void
mark_temp_slot (t)
     struct temp_slot *t;
{
  while (t)
    {
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);

      t = t->next;
    }
}

/* Mark P for GC.  */

static void
mark_function_status (p)
     struct function *p;
{
  int i;
  rtx *r;

  if (p == 0)
    return;

  ggc_mark_rtx (p->arg_offset_rtx);

  if (p->x_parm_reg_stack_loc)
    for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
	 i > 0; --i, ++r)
      ggc_mark_rtx (*r);

  ggc_mark_rtx (p->return_rtx);
  ggc_mark_rtx (p->x_cleanup_label);
  ggc_mark_rtx (p->x_return_label);
  ggc_mark_rtx (p->x_save_expr_regs);
  ggc_mark_rtx (p->x_stack_slot_list);
  ggc_mark_rtx (p->x_parm_birth_insn);
  ggc_mark_rtx (p->x_tail_recursion_label);
  ggc_mark_rtx (p->x_tail_recursion_reentry);
  ggc_mark_rtx (p->internal_arg_pointer);
  ggc_mark_rtx (p->x_arg_pointer_save_area);
  ggc_mark_tree (p->x_rtl_expr_chain);
  ggc_mark_rtx (p->x_last_parm_insn);
  ggc_mark_tree (p->x_context_display);
  ggc_mark_tree (p->x_trampoline_list);
  ggc_mark_rtx (p->epilogue_delay_list);

  mark_temp_slot (p->x_temp_slots);

  {
    struct var_refs_queue *q = p->fixup_var_refs_queue;
    while (q)
      {
	ggc_mark_rtx (q->modified);
	q = q->next;
      }
  }

  ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
  ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
  ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
  ggc_mark_tree (p->x_nonlocal_labels);
}

/* Mark the function chain ARG (which is really a struct function **)
   for GC.  */

static void
mark_function_chain (arg)
     void *arg;
{
  struct function *f = *(struct function **) arg;

  for (; f; f = f->next_global)
    {
      ggc_mark_tree (f->decl);

      mark_function_status (f);
      mark_eh_status (f->eh);
      mark_stmt_status (f->stmt);
      mark_expr_status (f->expr);
      mark_emit_status (f->emit);
      mark_varasm_status (f->varasm);

      if (mark_machine_status)
	(*mark_machine_status) (f);
      if (mark_lang_status)
	(*mark_lang_status) (f);

      if (f->original_arg_vector)
	ggc_mark_rtvec ((rtvec) f->original_arg_vector);
      if (f->original_decl_initial)
	ggc_mark_tree (f->original_decl_initial);
    }
}

/* Called once, at initialization, to initialize function.c.  */

void
init_function_once ()
{
  ggc_add_root (&all_functions, 1, sizeof all_functions,
		mark_function_chain);
}