* decl.c (pop_cp_function_context): Don't call free on a NULL
[official-gcc.git] / gcc / function.c
blobf6f5d88ede416cfa731279fea948c12ee8c50fab
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59 #include "hash.h"
60 #include "ggc.h"
61 #include "tm_p.h"
63 #ifndef TRAMPOLINE_ALIGNMENT
64 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
65 #endif
67 #ifndef LOCAL_ALIGNMENT
68 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
69 #endif
71 /* Some systems use __main in a way incompatible with its use in gcc, in these
72 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
73 give the same symbol without quotes for an alternative entry point. You
74 must define both, or neither. */
75 #ifndef NAME__MAIN
76 #define NAME__MAIN "__main"
77 #define SYMBOL__MAIN __main
78 #endif
80 /* Round a value to the lowest integer less than it that is a multiple of
81 the required alignment. Avoid using division in case the value is
82 negative. Assume the alignment is a power of two. */
83 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
85 /* Similar, but round to the next highest integer that meets the
86 alignment. */
87 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
89 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
90 during rtl generation. If they are different register numbers, this is
91 always true. It may also be true if
92 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
93 generation. See fix_lexical_addr for details. */
95 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
96 #define NEED_SEPARATE_AP
97 #endif
99 /* Nonzero if function being compiled doesn't contain any calls
100 (ignoring the prologue and epilogue). This is set prior to
101 local register allocation and is valid for the remaining
102 compiler passes. */
103 int current_function_is_leaf;
105 /* Nonzero if function being compiled doesn't modify the stack pointer
106 (ignoring the prologue and epilogue). This is only valid after
107 life_analysis has run. */
108 int current_function_sp_is_unchanging;
110 /* Nonzero if the function being compiled is a leaf function which only
111 uses leaf registers. This is valid after reload (specifically after
112 sched2) and is useful only if the port defines LEAF_REGISTERS. */
113 int current_function_uses_only_leaf_regs;
115 /* Nonzero once virtual register instantiation has been done.
116 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
117 static int virtuals_instantiated;
119 /* These variables hold pointers to functions to
120 save and restore machine-specific data,
121 in push_function_context and pop_function_context. */
122 void (*init_machine_status) PROTO((struct function *));
123 void (*save_machine_status) PROTO((struct function *));
124 void (*restore_machine_status) PROTO((struct function *));
125 void (*mark_machine_status) PROTO((struct function *));
126 void (*free_machine_status) PROTO((struct function *));
128 /* Likewise, but for language-specific data. */
129 void (*init_lang_status) PROTO((struct function *));
130 void (*save_lang_status) PROTO((struct function *));
131 void (*restore_lang_status) PROTO((struct function *));
132 void (*mark_lang_status) PROTO((struct function *));
133 void (*free_lang_status) PROTO((struct function *));
135 /* The FUNCTION_DECL for an inline function currently being expanded. */
136 tree inline_function_decl;
138 /* The currently compiled function. */
139 struct function *current_function = 0;
141 /* Global list of all compiled functions. */
142 struct function *all_functions = 0;
144 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
145 static int *prologue;
146 static int *epilogue;
148 /* In order to evaluate some expressions, such as function calls returning
149 structures in memory, we need to temporarily allocate stack locations.
150 We record each allocated temporary in the following structure.
152 Associated with each temporary slot is a nesting level. When we pop up
153 one level, all temporaries associated with the previous level are freed.
154 Normally, all temporaries are freed after the execution of the statement
155 in which they were created. However, if we are inside a ({...}) grouping,
156 the result may be in a temporary and hence must be preserved. If the
157 result could be in a temporary, we preserve it if we can determine which
158 one it is in. If we cannot determine which temporary may contain the
159 result, all temporaries are preserved. A temporary is preserved by
160 pretending it was allocated at the previous nesting level.
162 Automatic variables are also assigned temporary slots, at the nesting
163 level where they are defined. They are marked a "kept" so that
164 free_temp_slots will not free them. */
166 struct temp_slot
168 /* Points to next temporary slot. */
169 struct temp_slot *next;
170 /* The rtx to used to reference the slot. */
171 rtx slot;
172 /* The rtx used to represent the address if not the address of the
173 slot above. May be an EXPR_LIST if multiple addresses exist. */
174 rtx address;
175 /* The alignment (in bits) of the slot. */
176 int align;
177 /* The size, in units, of the slot. */
178 HOST_WIDE_INT size;
179 /* The alias set for the slot. If the alias set is zero, we don't
180 know anything about the alias set of the slot. We must only
181 reuse a slot if it is assigned an object of the same alias set.
182 Otherwise, the rest of the compiler may assume that the new use
183 of the slot cannot alias the old use of the slot, which is
184 false. If the slot has alias set zero, then we can't reuse the
185 slot at all, since we have no idea what alias set may have been
186 imposed on the memory. For example, if the stack slot is the
187 call frame for an inline functioned, we have no idea what alias
188 sets will be assigned to various pieces of the call frame. */
189 int alias_set;
190 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
191 tree rtl_expr;
192 /* Non-zero if this temporary is currently in use. */
193 char in_use;
194 /* Non-zero if this temporary has its address taken. */
195 char addr_taken;
196 /* Nesting level at which this slot is being used. */
197 int level;
198 /* Non-zero if this should survive a call to free_temp_slots. */
199 int keep;
200 /* The offset of the slot from the frame_pointer, including extra space
201 for alignment. This info is for combine_temp_slots. */
202 HOST_WIDE_INT base_offset;
203 /* The size of the slot, including extra space for alignment. This
204 info is for combine_temp_slots. */
205 HOST_WIDE_INT full_size;
208 /* This structure is used to record MEMs or pseudos used to replace VAR, any
209 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
210 maintain this list in case two operands of an insn were required to match;
211 in that case we must ensure we use the same replacement. */
213 struct fixup_replacement
215 rtx old;
216 rtx new;
217 struct fixup_replacement *next;
220 struct insns_for_mem_entry {
221 /* The KEY in HE will be a MEM. */
222 struct hash_entry he;
223 /* These are the INSNS which reference the MEM. */
224 rtx insns;
227 /* Forward declarations. */
229 static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
230 int, struct function *));
231 static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
232 int, tree));
233 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
234 static void put_reg_into_stack PROTO((struct function *, rtx, tree,
235 enum machine_mode, enum machine_mode,
236 int, int, int,
237 struct hash_table *));
238 static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
239 struct hash_table *));
240 static struct fixup_replacement
241 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
242 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
243 rtx, int, struct hash_table *));
244 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
245 struct fixup_replacement **));
246 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
247 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
248 static rtx fixup_stack_1 PROTO((rtx, rtx));
249 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
250 static void instantiate_decls PROTO((tree, int));
251 static void instantiate_decls_1 PROTO((tree, int));
252 static void instantiate_decl PROTO((rtx, int, int));
253 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
254 static void delete_handlers PROTO((void));
255 static void pad_to_arg_alignment PROTO((struct args_size *, int, struct args_size *));
256 #ifndef ARGS_GROW_DOWNWARD
257 static void pad_below PROTO((struct args_size *, enum machine_mode,
258 tree));
259 #endif
260 #ifdef ARGS_GROW_DOWNWARD
261 static tree round_down PROTO((tree, int));
262 #endif
263 static rtx round_trampoline_addr PROTO((rtx));
264 static tree blocks_nreverse PROTO((tree));
265 static int all_blocks PROTO((tree, tree *));
266 /* We always define `record_insns' even if its not used so that we
267 can always export `prologue_epilogue_contains'. */
268 static int *record_insns PROTO((rtx)) ATTRIBUTE_UNUSED;
269 static int contains PROTO((rtx, int *));
270 static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
271 static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
272 struct hash_table *));
273 static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
274 struct hash_table *,
275 hash_table_key));
276 static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
277 static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
278 static int insns_for_mem_walk PROTO ((rtx *, void *));
279 static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
280 static void mark_temp_slot PROTO ((struct temp_slot *));
281 static void mark_function_status PROTO ((struct function *));
282 static void mark_function_chain PROTO ((void *));
283 static void prepare_function_start PROTO ((void));
286 /* Pointer to chain of `struct function' for containing functions. */
287 struct function *outer_function_chain;
289 /* Given a function decl for a containing function,
290 return the `struct function' for it. */
292 struct function *
293 find_function_data (decl)
294 tree decl;
296 struct function *p;
298 for (p = outer_function_chain; p; p = p->next)
299 if (p->decl == decl)
300 return p;
302 abort ();
305 /* Save the current context for compilation of a nested function.
306 This is called from language-specific code. The caller should use
307 the save_lang_status callback to save any language-specific state,
308 since this function knows only about language-independent
309 variables. */
311 void
312 push_function_context_to (context)
313 tree context;
315 struct function *p, *context_data;
317 if (context)
319 context_data = (context == current_function_decl
320 ? current_function
321 : find_function_data (context));
322 context_data->contains_functions = 1;
325 if (current_function == 0)
326 init_dummy_function_start ();
327 p = current_function;
329 p->next = outer_function_chain;
330 outer_function_chain = p;
331 p->fixup_var_refs_queue = 0;
333 save_tree_status (p);
334 if (save_lang_status)
335 (*save_lang_status) (p);
336 if (save_machine_status)
337 (*save_machine_status) (p);
339 current_function = 0;
342 void
343 push_function_context ()
345 push_function_context_to (current_function_decl);
348 /* Restore the last saved context, at the end of a nested function.
349 This function is called from language-specific code. */
351 void
352 pop_function_context_from (context)
353 tree context ATTRIBUTE_UNUSED;
355 struct function *p = outer_function_chain;
356 struct var_refs_queue *queue;
357 struct var_refs_queue *next;
359 current_function = p;
360 outer_function_chain = p->next;
362 current_function_decl = p->decl;
363 reg_renumber = 0;
365 restore_tree_status (p);
366 restore_emit_status (p);
368 if (restore_machine_status)
369 (*restore_machine_status) (p);
370 if (restore_lang_status)
371 (*restore_lang_status) (p);
373 /* Finish doing put_var_into_stack for any of our variables
374 which became addressable during the nested function. */
375 for (queue = p->fixup_var_refs_queue; queue; queue = next)
377 next = queue->next;
378 fixup_var_refs (queue->modified, queue->promoted_mode,
379 queue->unsignedp, 0);
380 free (queue);
382 p->fixup_var_refs_queue = 0;
384 /* Reset variables that have known state during rtx generation. */
385 rtx_equal_function_value_matters = 1;
386 virtuals_instantiated = 0;
389 void
390 pop_function_context ()
392 pop_function_context_from (current_function_decl);
395 /* Clear out all parts of the state in F that can safely be discarded
396 after the function has been parsed, but not compiled, to let
397 garbage collection reclaim the memory. */
399 void
400 free_after_parsing (f)
401 struct function *f;
403 /* f->expr->forced_labels is used by code generation. */
404 /* f->emit->regno_reg_rtx is used by code generation. */
405 /* f->varasm is used by code generation. */
406 /* f->eh->eh_return_stub_label is used by code generation. */
408 if (free_lang_status)
409 (*free_lang_status) (f);
410 free_stmt_status (f);
413 /* Clear out all parts of the state in F that can safely be discarded
414 after the function has been compiled, to let garbage collection
415 reclaim the memory. */
417 void
418 free_after_compilation (f)
419 struct function *f;
421 free_eh_status (f);
422 free_expr_status (f);
423 free_emit_status (f);
424 free_varasm_status (f);
426 if (free_machine_status)
427 (*free_machine_status) (f);
429 if (f->x_parm_reg_stack_loc)
430 free (f->x_parm_reg_stack_loc);
432 f->arg_offset_rtx = NULL;
433 f->return_rtx = NULL;
434 f->internal_arg_pointer = NULL;
435 f->x_nonlocal_labels = NULL;
436 f->x_nonlocal_goto_handler_slots = NULL;
437 f->x_nonlocal_goto_handler_labels = NULL;
438 f->x_nonlocal_goto_stack_level = NULL;
439 f->x_cleanup_label = NULL;
440 f->x_return_label = NULL;
441 f->x_save_expr_regs = NULL;
442 f->x_stack_slot_list = NULL;
443 f->x_rtl_expr_chain = NULL;
444 f->x_tail_recursion_label = NULL;
445 f->x_tail_recursion_reentry = NULL;
446 f->x_arg_pointer_save_area = NULL;
447 f->x_context_display = NULL;
448 f->x_trampoline_list = NULL;
449 f->x_parm_birth_insn = NULL;
450 f->x_last_parm_insn = NULL;
451 f->x_parm_reg_stack_loc = NULL;
452 f->x_temp_slots = NULL;
453 f->fixup_var_refs_queue = NULL;
454 f->original_arg_vector = NULL;
455 f->original_decl_initial = NULL;
456 f->inl_last_parm_insn = NULL;
457 f->epilogue_delay_list = NULL;
461 /* Allocate fixed slots in the stack frame of the current function. */
463 /* Return size needed for stack frame based on slots so far allocated in
464 function F.
465 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
466 the caller may have to do that. */
468 HOST_WIDE_INT
469 get_func_frame_size (f)
470 struct function *f;
472 #ifdef FRAME_GROWS_DOWNWARD
473 return -f->x_frame_offset;
474 #else
475 return f->x_frame_offset;
476 #endif
479 /* Return size needed for stack frame based on slots so far allocated.
480 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
481 the caller may have to do that. */
482 HOST_WIDE_INT
483 get_frame_size ()
485 return get_func_frame_size (current_function);
488 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
489 with machine mode MODE.
491 ALIGN controls the amount of alignment for the address of the slot:
492 0 means according to MODE,
493 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
494 positive specifies alignment boundary in bits.
496 We do not round to stack_boundary here.
498 FUNCTION specifies the function to allocate in. */
500 static rtx
501 assign_stack_local_1 (mode, size, align, function)
502 enum machine_mode mode;
503 HOST_WIDE_INT size;
504 int align;
505 struct function *function;
507 register rtx x, addr;
508 int bigend_correction = 0;
509 int alignment;
511 /* Allocate in the memory associated with the function in whose frame
512 we are assigning. */
513 if (function != current_function)
514 push_obstacks (function->function_obstack,
515 function->function_maybepermanent_obstack);
517 if (align == 0)
519 tree type;
521 alignment = GET_MODE_ALIGNMENT (mode);
522 if (mode == BLKmode)
523 alignment = BIGGEST_ALIGNMENT;
525 /* Allow the target to (possibly) increase the alignment of this
526 stack slot. */
527 type = type_for_mode (mode, 0);
528 if (type)
529 alignment = LOCAL_ALIGNMENT (type, alignment);
531 alignment /= BITS_PER_UNIT;
533 else if (align == -1)
535 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
536 size = CEIL_ROUND (size, alignment);
538 else
539 alignment = align / BITS_PER_UNIT;
541 #ifdef FRAME_GROWS_DOWNWARD
542 function->x_frame_offset -= size;
543 #endif
545 /* Round frame offset to that alignment.
546 We must be careful here, since FRAME_OFFSET might be negative and
547 division with a negative dividend isn't as well defined as we might
548 like. So we instead assume that ALIGNMENT is a power of two and
549 use logical operations which are unambiguous. */
550 #ifdef FRAME_GROWS_DOWNWARD
551 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
552 #else
553 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
554 #endif
556 /* On a big-endian machine, if we are allocating more space than we will use,
557 use the least significant bytes of those that are allocated. */
558 if (BYTES_BIG_ENDIAN && mode != BLKmode)
559 bigend_correction = size - GET_MODE_SIZE (mode);
561 /* If we have already instantiated virtual registers, return the actual
562 address relative to the frame pointer. */
563 if (function == current_function && virtuals_instantiated)
564 addr = plus_constant (frame_pointer_rtx,
565 (frame_offset + bigend_correction
566 + STARTING_FRAME_OFFSET));
567 else
568 addr = plus_constant (virtual_stack_vars_rtx,
569 function->x_frame_offset + bigend_correction);
571 #ifndef FRAME_GROWS_DOWNWARD
572 function->x_frame_offset += size;
573 #endif
575 x = gen_rtx_MEM (mode, addr);
577 function->x_stack_slot_list
578 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
580 if (function != current_function)
581 pop_obstacks ();
583 return x;
586 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
587 current function. */
589 assign_stack_local (mode, size, align)
590 enum machine_mode mode;
591 HOST_WIDE_INT size;
592 int align;
594 return assign_stack_local_1 (mode, size, align, current_function);
597 /* Allocate a temporary stack slot and record it for possible later
598 reuse.
600 MODE is the machine mode to be given to the returned rtx.
602 SIZE is the size in units of the space required. We do no rounding here
603 since assign_stack_local will do any required rounding.
605 KEEP is 1 if this slot is to be retained after a call to
606 free_temp_slots. Automatic variables for a block are allocated
607 with this flag. KEEP is 2 if we allocate a longer term temporary,
608 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
609 if we are to allocate something at an inner level to be treated as
610 a variable in the block (e.g., a SAVE_EXPR).
612 TYPE is the type that will be used for the stack slot. */
614 static rtx
615 assign_stack_temp_for_type (mode, size, keep, type)
616 enum machine_mode mode;
617 HOST_WIDE_INT size;
618 int keep;
619 tree type;
621 int align;
622 int alias_set;
623 struct temp_slot *p, *best_p = 0;
625 /* If SIZE is -1 it means that somebody tried to allocate a temporary
626 of a variable size. */
627 if (size == -1)
628 abort ();
630 /* If we know the alias set for the memory that will be used, use
631 it. If there's no TYPE, then we don't know anything about the
632 alias set for the memory. */
633 if (type)
634 alias_set = get_alias_set (type);
635 else
636 alias_set = 0;
638 align = GET_MODE_ALIGNMENT (mode);
639 if (mode == BLKmode)
640 align = BIGGEST_ALIGNMENT;
642 if (! type)
643 type = type_for_mode (mode, 0);
644 if (type)
645 align = LOCAL_ALIGNMENT (type, align);
647 /* Try to find an available, already-allocated temporary of the proper
648 mode which meets the size and alignment requirements. Choose the
649 smallest one with the closest alignment. */
650 for (p = temp_slots; p; p = p->next)
651 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
652 && ! p->in_use
653 && (!flag_strict_aliasing
654 || (alias_set && p->alias_set == alias_set))
655 && (best_p == 0 || best_p->size > p->size
656 || (best_p->size == p->size && best_p->align > p->align)))
658 if (p->align == align && p->size == size)
660 best_p = 0;
661 break;
663 best_p = p;
666 /* Make our best, if any, the one to use. */
667 if (best_p)
669 /* If there are enough aligned bytes left over, make them into a new
670 temp_slot so that the extra bytes don't get wasted. Do this only
671 for BLKmode slots, so that we can be sure of the alignment. */
672 if (GET_MODE (best_p->slot) == BLKmode
673 /* We can't split slots if -fstrict-aliasing because the
674 information about the alias set for the new slot will be
675 lost. */
676 && !flag_strict_aliasing)
678 int alignment = best_p->align / BITS_PER_UNIT;
679 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
681 if (best_p->size - rounded_size >= alignment)
683 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
684 p->in_use = p->addr_taken = 0;
685 p->size = best_p->size - rounded_size;
686 p->base_offset = best_p->base_offset + rounded_size;
687 p->full_size = best_p->full_size - rounded_size;
688 p->slot = gen_rtx_MEM (BLKmode,
689 plus_constant (XEXP (best_p->slot, 0),
690 rounded_size));
691 p->align = best_p->align;
692 p->address = 0;
693 p->rtl_expr = 0;
694 p->next = temp_slots;
695 temp_slots = p;
697 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
698 stack_slot_list);
700 best_p->size = rounded_size;
701 best_p->full_size = rounded_size;
705 p = best_p;
708 /* If we still didn't find one, make a new temporary. */
709 if (p == 0)
711 HOST_WIDE_INT frame_offset_old = frame_offset;
713 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
715 /* We are passing an explicit alignment request to assign_stack_local.
716 One side effect of that is assign_stack_local will not round SIZE
717 to ensure the frame offset remains suitably aligned.
719 So for requests which depended on the rounding of SIZE, we go ahead
720 and round it now. We also make sure ALIGNMENT is at least
721 BIGGEST_ALIGNMENT. */
722 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
723 abort();
724 p->slot = assign_stack_local (mode,
725 (mode == BLKmode
726 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
727 : size),
728 align);
730 p->align = align;
731 p->alias_set = alias_set;
733 /* The following slot size computation is necessary because we don't
734 know the actual size of the temporary slot until assign_stack_local
735 has performed all the frame alignment and size rounding for the
736 requested temporary. Note that extra space added for alignment
737 can be either above or below this stack slot depending on which
738 way the frame grows. We include the extra space if and only if it
739 is above this slot. */
740 #ifdef FRAME_GROWS_DOWNWARD
741 p->size = frame_offset_old - frame_offset;
742 #else
743 p->size = size;
744 #endif
746 /* Now define the fields used by combine_temp_slots. */
747 #ifdef FRAME_GROWS_DOWNWARD
748 p->base_offset = frame_offset;
749 p->full_size = frame_offset_old - frame_offset;
750 #else
751 p->base_offset = frame_offset_old;
752 p->full_size = frame_offset - frame_offset_old;
753 #endif
754 p->address = 0;
755 p->next = temp_slots;
756 temp_slots = p;
759 p->in_use = 1;
760 p->addr_taken = 0;
761 p->rtl_expr = seq_rtl_expr;
763 if (keep == 2)
765 p->level = target_temp_slot_level;
766 p->keep = 0;
768 else if (keep == 3)
770 p->level = var_temp_slot_level;
771 p->keep = 0;
773 else
775 p->level = temp_slot_level;
776 p->keep = keep;
779 /* We may be reusing an old slot, so clear any MEM flags that may have been
780 set from before. */
781 RTX_UNCHANGING_P (p->slot) = 0;
782 MEM_IN_STRUCT_P (p->slot) = 0;
783 MEM_SCALAR_P (p->slot) = 0;
784 MEM_ALIAS_SET (p->slot) = 0;
785 return p->slot;
788 /* Allocate a temporary stack slot and record it for possible later
789 reuse. First three arguments are same as in preceding function. */
792 assign_stack_temp (mode, size, keep)
793 enum machine_mode mode;
794 HOST_WIDE_INT size;
795 int keep;
797 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
800 /* Assign a temporary of given TYPE.
801 KEEP is as for assign_stack_temp.
802 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
803 it is 0 if a register is OK.
804 DONT_PROMOTE is 1 if we should not promote values in register
805 to wider modes. */
808 assign_temp (type, keep, memory_required, dont_promote)
809 tree type;
810 int keep;
811 int memory_required;
812 int dont_promote;
814 enum machine_mode mode = TYPE_MODE (type);
815 int unsignedp = TREE_UNSIGNED (type);
817 if (mode == BLKmode || memory_required)
819 HOST_WIDE_INT size = int_size_in_bytes (type);
820 rtx tmp;
822 /* Unfortunately, we don't yet know how to allocate variable-sized
823 temporaries. However, sometimes we have a fixed upper limit on
824 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
825 instead. This is the case for Chill variable-sized strings. */
826 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
827 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
828 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
829 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
831 tmp = assign_stack_temp_for_type (mode, size, keep, type);
832 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
833 return tmp;
836 #ifndef PROMOTE_FOR_CALL_ONLY
837 if (! dont_promote)
838 mode = promote_mode (type, mode, &unsignedp, 0);
839 #endif
841 return gen_reg_rtx (mode);
844 /* Combine temporary stack slots which are adjacent on the stack.
846 This allows for better use of already allocated stack space. This is only
847 done for BLKmode slots because we can be sure that we won't have alignment
848 problems in this case. */
850 void
851 combine_temp_slots ()
853 struct temp_slot *p, *q;
854 struct temp_slot *prev_p, *prev_q;
855 int num_slots;
857 /* We can't combine slots, because the information about which slot
858 is in which alias set will be lost. */
859 if (flag_strict_aliasing)
860 return;
862 /* If there are a lot of temp slots, don't do anything unless
863 high levels of optimizaton. */
864 if (! flag_expensive_optimizations)
865 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
866 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
867 return;
869 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
871 int delete_p = 0;
873 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
874 for (q = p->next, prev_q = p; q; q = prev_q->next)
876 int delete_q = 0;
877 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
879 if (p->base_offset + p->full_size == q->base_offset)
881 /* Q comes after P; combine Q into P. */
882 p->size += q->size;
883 p->full_size += q->full_size;
884 delete_q = 1;
886 else if (q->base_offset + q->full_size == p->base_offset)
888 /* P comes after Q; combine P into Q. */
889 q->size += p->size;
890 q->full_size += p->full_size;
891 delete_p = 1;
892 break;
895 /* Either delete Q or advance past it. */
896 if (delete_q)
897 prev_q->next = q->next;
898 else
899 prev_q = q;
901 /* Either delete P or advance past it. */
902 if (delete_p)
904 if (prev_p)
905 prev_p->next = p->next;
906 else
907 temp_slots = p->next;
909 else
910 prev_p = p;
914 /* Find the temp slot corresponding to the object at address X. */
916 static struct temp_slot *
917 find_temp_slot_from_address (x)
918 rtx x;
920 struct temp_slot *p;
921 rtx next;
923 for (p = temp_slots; p; p = p->next)
925 if (! p->in_use)
926 continue;
928 else if (XEXP (p->slot, 0) == x
929 || p->address == x
930 || (GET_CODE (x) == PLUS
931 && XEXP (x, 0) == virtual_stack_vars_rtx
932 && GET_CODE (XEXP (x, 1)) == CONST_INT
933 && INTVAL (XEXP (x, 1)) >= p->base_offset
934 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
935 return p;
937 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
938 for (next = p->address; next; next = XEXP (next, 1))
939 if (XEXP (next, 0) == x)
940 return p;
943 return 0;
946 /* Indicate that NEW is an alternate way of referring to the temp slot
947 that previously was known by OLD. */
949 void
950 update_temp_slot_address (old, new)
951 rtx old, new;
953 struct temp_slot *p = find_temp_slot_from_address (old);
955 /* If none, return. Else add NEW as an alias. */
956 if (p == 0)
957 return;
958 else if (p->address == 0)
959 p->address = new;
960 else
962 if (GET_CODE (p->address) != EXPR_LIST)
963 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
965 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
969 /* If X could be a reference to a temporary slot, mark the fact that its
970 address was taken. */
972 void
973 mark_temp_addr_taken (x)
974 rtx x;
976 struct temp_slot *p;
978 if (x == 0)
979 return;
981 /* If X is not in memory or is at a constant address, it cannot be in
982 a temporary slot. */
983 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
984 return;
986 p = find_temp_slot_from_address (XEXP (x, 0));
987 if (p != 0)
988 p->addr_taken = 1;
991 /* If X could be a reference to a temporary slot, mark that slot as
992 belonging to the to one level higher than the current level. If X
993 matched one of our slots, just mark that one. Otherwise, we can't
994 easily predict which it is, so upgrade all of them. Kept slots
995 need not be touched.
997 This is called when an ({...}) construct occurs and a statement
998 returns a value in memory. */
1000 void
1001 preserve_temp_slots (x)
1002 rtx x;
1004 struct temp_slot *p = 0;
1006 /* If there is no result, we still might have some objects whose address
1007 were taken, so we need to make sure they stay around. */
1008 if (x == 0)
1010 for (p = temp_slots; p; p = p->next)
1011 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1012 p->level--;
1014 return;
1017 /* If X is a register that is being used as a pointer, see if we have
1018 a temporary slot we know it points to. To be consistent with
1019 the code below, we really should preserve all non-kept slots
1020 if we can't find a match, but that seems to be much too costly. */
1021 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1022 p = find_temp_slot_from_address (x);
1024 /* If X is not in memory or is at a constant address, it cannot be in
1025 a temporary slot, but it can contain something whose address was
1026 taken. */
1027 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1029 for (p = temp_slots; p; p = p->next)
1030 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1031 p->level--;
1033 return;
1036 /* First see if we can find a match. */
1037 if (p == 0)
1038 p = find_temp_slot_from_address (XEXP (x, 0));
1040 if (p != 0)
1042 /* Move everything at our level whose address was taken to our new
1043 level in case we used its address. */
1044 struct temp_slot *q;
1046 if (p->level == temp_slot_level)
1048 for (q = temp_slots; q; q = q->next)
1049 if (q != p && q->addr_taken && q->level == p->level)
1050 q->level--;
1052 p->level--;
1053 p->addr_taken = 0;
1055 return;
1058 /* Otherwise, preserve all non-kept slots at this level. */
1059 for (p = temp_slots; p; p = p->next)
1060 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1061 p->level--;
1064 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1065 with that RTL_EXPR, promote it into a temporary slot at the present
1066 level so it will not be freed when we free slots made in the
1067 RTL_EXPR. */
1069 void
1070 preserve_rtl_expr_result (x)
1071 rtx x;
1073 struct temp_slot *p;
1075 /* If X is not in memory or is at a constant address, it cannot be in
1076 a temporary slot. */
1077 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1078 return;
1080 /* If we can find a match, move it to our level unless it is already at
1081 an upper level. */
1082 p = find_temp_slot_from_address (XEXP (x, 0));
1083 if (p != 0)
1085 p->level = MIN (p->level, temp_slot_level);
1086 p->rtl_expr = 0;
1089 return;
1092 /* Free all temporaries used so far. This is normally called at the end
1093 of generating code for a statement. Don't free any temporaries
1094 currently in use for an RTL_EXPR that hasn't yet been emitted.
1095 We could eventually do better than this since it can be reused while
1096 generating the same RTL_EXPR, but this is complex and probably not
1097 worthwhile. */
1099 void
1100 free_temp_slots ()
1102 struct temp_slot *p;
1104 for (p = temp_slots; p; p = p->next)
1105 if (p->in_use && p->level == temp_slot_level && ! p->keep
1106 && p->rtl_expr == 0)
1107 p->in_use = 0;
1109 combine_temp_slots ();
1112 /* Free all temporary slots used in T, an RTL_EXPR node. */
1114 void
1115 free_temps_for_rtl_expr (t)
1116 tree t;
1118 struct temp_slot *p;
1120 for (p = temp_slots; p; p = p->next)
1121 if (p->rtl_expr == t)
1122 p->in_use = 0;
1124 combine_temp_slots ();
1127 /* Mark all temporaries ever allocated in this function as not suitable
1128 for reuse until the current level is exited. */
1130 void
1131 mark_all_temps_used ()
1133 struct temp_slot *p;
1135 for (p = temp_slots; p; p = p->next)
1137 p->in_use = p->keep = 1;
1138 p->level = MIN (p->level, temp_slot_level);
1142 /* Push deeper into the nesting level for stack temporaries. */
1144 void
1145 push_temp_slots ()
1147 temp_slot_level++;
1150 /* Likewise, but save the new level as the place to allocate variables
1151 for blocks. */
1153 #if 0
1154 void
1155 push_temp_slots_for_block ()
1157 push_temp_slots ();
1159 var_temp_slot_level = temp_slot_level;
1162 /* Likewise, but save the new level as the place to allocate temporaries
1163 for TARGET_EXPRs. */
1165 void
1166 push_temp_slots_for_target ()
1168 push_temp_slots ();
1170 target_temp_slot_level = temp_slot_level;
1173 /* Set and get the value of target_temp_slot_level. The only
1174 permitted use of these functions is to save and restore this value. */
1177 get_target_temp_slot_level ()
1179 return target_temp_slot_level;
1182 void
1183 set_target_temp_slot_level (level)
1184 int level;
1186 target_temp_slot_level = level;
1188 #endif
1190 /* Pop a temporary nesting level. All slots in use in the current level
1191 are freed. */
1193 void
1194 pop_temp_slots ()
1196 struct temp_slot *p;
1198 for (p = temp_slots; p; p = p->next)
1199 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1200 p->in_use = 0;
1202 combine_temp_slots ();
1204 temp_slot_level--;
1207 /* Initialize temporary slots. */
1209 void
1210 init_temp_slots ()
1212 /* We have not allocated any temporaries yet. */
1213 temp_slots = 0;
1214 temp_slot_level = 0;
1215 var_temp_slot_level = 0;
1216 target_temp_slot_level = 0;
1219 /* Retroactively move an auto variable from a register to a stack slot.
1220 This is done when an address-reference to the variable is seen. */
1222 void
1223 put_var_into_stack (decl)
1224 tree decl;
1226 register rtx reg;
1227 enum machine_mode promoted_mode, decl_mode;
1228 struct function *function = 0;
1229 tree context;
1230 int can_use_addressof;
1232 context = decl_function_context (decl);
1234 /* Get the current rtl used for this object and its original mode. */
1235 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1237 /* No need to do anything if decl has no rtx yet
1238 since in that case caller is setting TREE_ADDRESSABLE
1239 and a stack slot will be assigned when the rtl is made. */
1240 if (reg == 0)
1241 return;
1243 /* Get the declared mode for this object. */
1244 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1245 : DECL_MODE (decl));
1246 /* Get the mode it's actually stored in. */
1247 promoted_mode = GET_MODE (reg);
1249 /* If this variable comes from an outer function,
1250 find that function's saved context. */
1251 if (context != current_function_decl && context != inline_function_decl)
1252 for (function = outer_function_chain; function; function = function->next)
1253 if (function->decl == context)
1254 break;
1256 /* If this is a variable-size object with a pseudo to address it,
1257 put that pseudo into the stack, if the var is nonlocal. */
1258 if (DECL_NONLOCAL (decl)
1259 && GET_CODE (reg) == MEM
1260 && GET_CODE (XEXP (reg, 0)) == REG
1261 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1263 reg = XEXP (reg, 0);
1264 decl_mode = promoted_mode = GET_MODE (reg);
1267 can_use_addressof
1268 = (function == 0
1269 && optimize > 0
1270 /* FIXME make it work for promoted modes too */
1271 && decl_mode == promoted_mode
1272 #ifdef NON_SAVING_SETJMP
1273 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1274 #endif
1277 /* If we can't use ADDRESSOF, make sure we see through one we already
1278 generated. */
1279 if (! can_use_addressof && GET_CODE (reg) == MEM
1280 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1281 reg = XEXP (XEXP (reg, 0), 0);
1283 /* Now we should have a value that resides in one or more pseudo regs. */
1285 if (GET_CODE (reg) == REG)
1287 /* If this variable lives in the current function and we don't need
1288 to put things in the stack for the sake of setjmp, try to keep it
1289 in a register until we know we actually need the address. */
1290 if (can_use_addressof)
1291 gen_mem_addressof (reg, decl);
1292 else
1293 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1294 promoted_mode, decl_mode,
1295 TREE_SIDE_EFFECTS (decl), 0,
1296 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1299 else if (GET_CODE (reg) == CONCAT)
1301 /* A CONCAT contains two pseudos; put them both in the stack.
1302 We do it so they end up consecutive. */
1303 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1304 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1305 #ifdef FRAME_GROWS_DOWNWARD
1306 /* Since part 0 should have a lower address, do it second. */
1307 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1308 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1309 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1311 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1312 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1313 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1315 #else
1316 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1317 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1318 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1320 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1321 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1322 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1324 #endif
1326 /* Change the CONCAT into a combined MEM for both parts. */
1327 PUT_CODE (reg, MEM);
1328 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1329 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1331 /* The two parts are in memory order already.
1332 Use the lower parts address as ours. */
1333 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1334 /* Prevent sharing of rtl that might lose. */
1335 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1336 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1338 else
1339 return;
1341 if (current_function_check_memory_usage)
1342 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1343 XEXP (reg, 0), Pmode,
1344 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1345 TYPE_MODE (sizetype),
1346 GEN_INT (MEMORY_USE_RW),
1347 TYPE_MODE (integer_type_node));
1350 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1351 into the stack frame of FUNCTION (0 means the current function).
1352 DECL_MODE is the machine mode of the user-level data type.
1353 PROMOTED_MODE is the machine mode of the register.
1354 VOLATILE_P is nonzero if this is for a "volatile" decl.
1355 USED_P is nonzero if this reg might have already been used in an insn. */
1357 static void
1358 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1359 original_regno, used_p, ht)
1360 struct function *function;
1361 rtx reg;
1362 tree type;
1363 enum machine_mode promoted_mode, decl_mode;
1364 int volatile_p;
1365 int original_regno;
1366 int used_p;
1367 struct hash_table *ht;
1369 struct function *func = function ? function : current_function;
1370 rtx new = 0;
1371 int regno = original_regno;
1373 if (regno == 0)
1374 regno = REGNO (reg);
1376 if (regno < func->x_max_parm_reg)
1377 new = func->x_parm_reg_stack_loc[regno];
1378 if (new == 0)
1379 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1381 PUT_CODE (reg, MEM);
1382 PUT_MODE (reg, decl_mode);
1383 XEXP (reg, 0) = XEXP (new, 0);
1384 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1385 MEM_VOLATILE_P (reg) = volatile_p;
1387 /* If this is a memory ref that contains aggregate components,
1388 mark it as such for cse and loop optimize. If we are reusing a
1389 previously generated stack slot, then we need to copy the bit in
1390 case it was set for other reasons. For instance, it is set for
1391 __builtin_va_alist. */
1392 MEM_SET_IN_STRUCT_P (reg,
1393 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1394 MEM_ALIAS_SET (reg) = get_alias_set (type);
1396 /* Now make sure that all refs to the variable, previously made
1397 when it was a register, are fixed up to be valid again. */
1399 if (used_p && function != 0)
1401 struct var_refs_queue *temp;
1403 temp
1404 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1405 temp->modified = reg;
1406 temp->promoted_mode = promoted_mode;
1407 temp->unsignedp = TREE_UNSIGNED (type);
1408 temp->next = function->fixup_var_refs_queue;
1409 function->fixup_var_refs_queue = temp;
1411 else if (used_p)
1412 /* Variable is local; fix it up now. */
1413 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
1416 static void
1417 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1418 rtx var;
1419 enum machine_mode promoted_mode;
1420 int unsignedp;
1421 struct hash_table *ht;
1423 tree pending;
1424 rtx first_insn = get_insns ();
1425 struct sequence_stack *stack = seq_stack;
1426 tree rtl_exps = rtl_expr_chain;
1428 /* Must scan all insns for stack-refs that exceed the limit. */
1429 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1430 stack == 0, ht);
1431 /* If there's a hash table, it must record all uses of VAR. */
1432 if (ht)
1433 return;
1435 /* Scan all pending sequences too. */
1436 for (; stack; stack = stack->next)
1438 push_to_sequence (stack->first);
1439 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1440 stack->first, stack->next != 0, 0);
1441 /* Update remembered end of sequence
1442 in case we added an insn at the end. */
1443 stack->last = get_last_insn ();
1444 end_sequence ();
1447 /* Scan all waiting RTL_EXPRs too. */
1448 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1450 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1451 if (seq != const0_rtx && seq != 0)
1453 push_to_sequence (seq);
1454 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1456 end_sequence ();
1460 /* Scan the catch clauses for exception handling too. */
1461 push_to_sequence (catch_clauses);
1462 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1463 0, 0);
1464 end_sequence ();
1467 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1468 some part of an insn. Return a struct fixup_replacement whose OLD
1469 value is equal to X. Allocate a new structure if no such entry exists. */
1471 static struct fixup_replacement *
1472 find_fixup_replacement (replacements, x)
1473 struct fixup_replacement **replacements;
1474 rtx x;
1476 struct fixup_replacement *p;
1478 /* See if we have already replaced this. */
1479 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1482 if (p == 0)
1484 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1485 p->old = x;
1486 p->new = 0;
1487 p->next = *replacements;
1488 *replacements = p;
1491 return p;
1494 /* Scan the insn-chain starting with INSN for refs to VAR
1495 and fix them up. TOPLEVEL is nonzero if this chain is the
1496 main chain of insns for the current function. */
1498 static void
1499 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1500 rtx var;
1501 enum machine_mode promoted_mode;
1502 int unsignedp;
1503 rtx insn;
1504 int toplevel;
1505 struct hash_table *ht;
1507 rtx call_dest = 0;
1508 rtx insn_list = NULL_RTX;
1510 /* If we already know which INSNs reference VAR there's no need
1511 to walk the entire instruction chain. */
1512 if (ht)
1514 insn_list = ((struct insns_for_mem_entry *)
1515 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1516 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1517 insn_list = XEXP (insn_list, 1);
1520 while (insn)
1522 rtx next = NEXT_INSN (insn);
1523 rtx set, prev, prev_set;
1524 rtx note;
1526 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1528 /* Remember the notes in case we delete the insn. */
1529 note = REG_NOTES (insn);
1531 /* If this is a CLOBBER of VAR, delete it.
1533 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1534 and REG_RETVAL notes too. */
1535 if (GET_CODE (PATTERN (insn)) == CLOBBER
1536 && (XEXP (PATTERN (insn), 0) == var
1537 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1538 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1539 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1541 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1542 /* The REG_LIBCALL note will go away since we are going to
1543 turn INSN into a NOTE, so just delete the
1544 corresponding REG_RETVAL note. */
1545 remove_note (XEXP (note, 0),
1546 find_reg_note (XEXP (note, 0), REG_RETVAL,
1547 NULL_RTX));
1549 /* In unoptimized compilation, we shouldn't call delete_insn
1550 except in jump.c doing warnings. */
1551 PUT_CODE (insn, NOTE);
1552 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1553 NOTE_SOURCE_FILE (insn) = 0;
1556 /* The insn to load VAR from a home in the arglist
1557 is now a no-op. When we see it, just delete it.
1558 Similarly if this is storing VAR from a register from which
1559 it was loaded in the previous insn. This will occur
1560 when an ADDRESSOF was made for an arglist slot. */
1561 else if (toplevel
1562 && (set = single_set (insn)) != 0
1563 && SET_DEST (set) == var
1564 /* If this represents the result of an insn group,
1565 don't delete the insn. */
1566 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1567 && (rtx_equal_p (SET_SRC (set), var)
1568 || (GET_CODE (SET_SRC (set)) == REG
1569 && (prev = prev_nonnote_insn (insn)) != 0
1570 && (prev_set = single_set (prev)) != 0
1571 && SET_DEST (prev_set) == SET_SRC (set)
1572 && rtx_equal_p (SET_SRC (prev_set), var))))
1574 /* In unoptimized compilation, we shouldn't call delete_insn
1575 except in jump.c doing warnings. */
1576 PUT_CODE (insn, NOTE);
1577 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1578 NOTE_SOURCE_FILE (insn) = 0;
1579 if (insn == last_parm_insn)
1580 last_parm_insn = PREV_INSN (next);
1582 else
1584 struct fixup_replacement *replacements = 0;
1585 rtx next_insn = NEXT_INSN (insn);
1587 if (SMALL_REGISTER_CLASSES)
1589 /* If the insn that copies the results of a CALL_INSN
1590 into a pseudo now references VAR, we have to use an
1591 intermediate pseudo since we want the life of the
1592 return value register to be only a single insn.
1594 If we don't use an intermediate pseudo, such things as
1595 address computations to make the address of VAR valid
1596 if it is not can be placed between the CALL_INSN and INSN.
1598 To make sure this doesn't happen, we record the destination
1599 of the CALL_INSN and see if the next insn uses both that
1600 and VAR. */
1602 if (call_dest != 0 && GET_CODE (insn) == INSN
1603 && reg_mentioned_p (var, PATTERN (insn))
1604 && reg_mentioned_p (call_dest, PATTERN (insn)))
1606 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1608 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1610 PATTERN (insn) = replace_rtx (PATTERN (insn),
1611 call_dest, temp);
1614 if (GET_CODE (insn) == CALL_INSN
1615 && GET_CODE (PATTERN (insn)) == SET)
1616 call_dest = SET_DEST (PATTERN (insn));
1617 else if (GET_CODE (insn) == CALL_INSN
1618 && GET_CODE (PATTERN (insn)) == PARALLEL
1619 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1620 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1621 else
1622 call_dest = 0;
1625 /* See if we have to do anything to INSN now that VAR is in
1626 memory. If it needs to be loaded into a pseudo, use a single
1627 pseudo for the entire insn in case there is a MATCH_DUP
1628 between two operands. We pass a pointer to the head of
1629 a list of struct fixup_replacements. If fixup_var_refs_1
1630 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1631 it will record them in this list.
1633 If it allocated a pseudo for any replacement, we copy into
1634 it here. */
1636 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1637 &replacements);
1639 /* If this is last_parm_insn, and any instructions were output
1640 after it to fix it up, then we must set last_parm_insn to
1641 the last such instruction emitted. */
1642 if (insn == last_parm_insn)
1643 last_parm_insn = PREV_INSN (next_insn);
1645 while (replacements)
1647 if (GET_CODE (replacements->new) == REG)
1649 rtx insert_before;
1650 rtx seq;
1652 /* OLD might be a (subreg (mem)). */
1653 if (GET_CODE (replacements->old) == SUBREG)
1654 replacements->old
1655 = fixup_memory_subreg (replacements->old, insn, 0);
1656 else
1657 replacements->old
1658 = fixup_stack_1 (replacements->old, insn);
1660 insert_before = insn;
1662 /* If we are changing the mode, do a conversion.
1663 This might be wasteful, but combine.c will
1664 eliminate much of the waste. */
1666 if (GET_MODE (replacements->new)
1667 != GET_MODE (replacements->old))
1669 start_sequence ();
1670 convert_move (replacements->new,
1671 replacements->old, unsignedp);
1672 seq = gen_sequence ();
1673 end_sequence ();
1675 else
1676 seq = gen_move_insn (replacements->new,
1677 replacements->old);
1679 emit_insn_before (seq, insert_before);
1682 replacements = replacements->next;
1686 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1687 But don't touch other insns referred to by reg-notes;
1688 we will get them elsewhere. */
1689 while (note)
1691 if (GET_CODE (note) != INSN_LIST)
1692 XEXP (note, 0)
1693 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1694 note = XEXP (note, 1);
1698 if (!ht)
1699 insn = next;
1700 else if (insn_list)
1702 insn = XEXP (insn_list, 0);
1703 insn_list = XEXP (insn_list, 1);
1705 else
1706 insn = NULL_RTX;
1710 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1711 See if the rtx expression at *LOC in INSN needs to be changed.
1713 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1714 contain a list of original rtx's and replacements. If we find that we need
1715 to modify this insn by replacing a memory reference with a pseudo or by
1716 making a new MEM to implement a SUBREG, we consult that list to see if
1717 we have already chosen a replacement. If none has already been allocated,
1718 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1719 or the SUBREG, as appropriate, to the pseudo. */
1721 static void
1722 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1723 register rtx var;
1724 enum machine_mode promoted_mode;
1725 register rtx *loc;
1726 rtx insn;
1727 struct fixup_replacement **replacements;
1729 register int i;
1730 register rtx x = *loc;
1731 RTX_CODE code = GET_CODE (x);
1732 register const char *fmt;
1733 register rtx tem, tem1;
1734 struct fixup_replacement *replacement;
1736 switch (code)
1738 case ADDRESSOF:
1739 if (XEXP (x, 0) == var)
1741 /* Prevent sharing of rtl that might lose. */
1742 rtx sub = copy_rtx (XEXP (var, 0));
1744 if (! validate_change (insn, loc, sub, 0))
1746 rtx y = gen_reg_rtx (GET_MODE (sub));
1747 rtx seq, new_insn;
1749 /* We should be able to replace with a register or all is lost.
1750 Note that we can't use validate_change to verify this, since
1751 we're not caring for replacing all dups simultaneously. */
1752 if (! validate_replace_rtx (*loc, y, insn))
1753 abort ();
1755 /* Careful! First try to recognize a direct move of the
1756 value, mimicking how things are done in gen_reload wrt
1757 PLUS. Consider what happens when insn is a conditional
1758 move instruction and addsi3 clobbers flags. */
1760 start_sequence ();
1761 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1762 seq = gen_sequence ();
1763 end_sequence ();
1765 if (recog_memoized (new_insn) < 0)
1767 /* That failed. Fall back on force_operand and hope. */
1769 start_sequence ();
1770 force_operand (sub, y);
1771 seq = gen_sequence ();
1772 end_sequence ();
1775 #ifdef HAVE_cc0
1776 /* Don't separate setter from user. */
1777 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1778 insn = PREV_INSN (insn);
1779 #endif
1781 emit_insn_before (seq, insn);
1784 return;
1786 case MEM:
1787 if (var == x)
1789 /* If we already have a replacement, use it. Otherwise,
1790 try to fix up this address in case it is invalid. */
1792 replacement = find_fixup_replacement (replacements, var);
1793 if (replacement->new)
1795 *loc = replacement->new;
1796 return;
1799 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1801 /* Unless we are forcing memory to register or we changed the mode,
1802 we can leave things the way they are if the insn is valid. */
1804 INSN_CODE (insn) = -1;
1805 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1806 && recog_memoized (insn) >= 0)
1807 return;
1809 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1810 return;
1813 /* If X contains VAR, we need to unshare it here so that we update
1814 each occurrence separately. But all identical MEMs in one insn
1815 must be replaced with the same rtx because of the possibility of
1816 MATCH_DUPs. */
1818 if (reg_mentioned_p (var, x))
1820 replacement = find_fixup_replacement (replacements, x);
1821 if (replacement->new == 0)
1822 replacement->new = copy_most_rtx (x, var);
1824 *loc = x = replacement->new;
1826 break;
1828 case REG:
1829 case CC0:
1830 case PC:
1831 case CONST_INT:
1832 case CONST:
1833 case SYMBOL_REF:
1834 case LABEL_REF:
1835 case CONST_DOUBLE:
1836 return;
1838 case SIGN_EXTRACT:
1839 case ZERO_EXTRACT:
1840 /* Note that in some cases those types of expressions are altered
1841 by optimize_bit_field, and do not survive to get here. */
1842 if (XEXP (x, 0) == var
1843 || (GET_CODE (XEXP (x, 0)) == SUBREG
1844 && SUBREG_REG (XEXP (x, 0)) == var))
1846 /* Get TEM as a valid MEM in the mode presently in the insn.
1848 We don't worry about the possibility of MATCH_DUP here; it
1849 is highly unlikely and would be tricky to handle. */
1851 tem = XEXP (x, 0);
1852 if (GET_CODE (tem) == SUBREG)
1854 if (GET_MODE_BITSIZE (GET_MODE (tem))
1855 > GET_MODE_BITSIZE (GET_MODE (var)))
1857 replacement = find_fixup_replacement (replacements, var);
1858 if (replacement->new == 0)
1859 replacement->new = gen_reg_rtx (GET_MODE (var));
1860 SUBREG_REG (tem) = replacement->new;
1862 else
1863 tem = fixup_memory_subreg (tem, insn, 0);
1865 else
1866 tem = fixup_stack_1 (tem, insn);
1868 /* Unless we want to load from memory, get TEM into the proper mode
1869 for an extract from memory. This can only be done if the
1870 extract is at a constant position and length. */
1872 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1873 && GET_CODE (XEXP (x, 2)) == CONST_INT
1874 && ! mode_dependent_address_p (XEXP (tem, 0))
1875 && ! MEM_VOLATILE_P (tem))
1877 enum machine_mode wanted_mode = VOIDmode;
1878 enum machine_mode is_mode = GET_MODE (tem);
1879 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1881 #ifdef HAVE_extzv
1882 if (GET_CODE (x) == ZERO_EXTRACT)
1884 wanted_mode
1885 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1886 if (wanted_mode == VOIDmode)
1887 wanted_mode = word_mode;
1889 #endif
1890 #ifdef HAVE_extv
1891 if (GET_CODE (x) == SIGN_EXTRACT)
1893 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1894 if (wanted_mode == VOIDmode)
1895 wanted_mode = word_mode;
1897 #endif
1898 /* If we have a narrower mode, we can do something. */
1899 if (wanted_mode != VOIDmode
1900 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1902 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1903 rtx old_pos = XEXP (x, 2);
1904 rtx newmem;
1906 /* If the bytes and bits are counted differently, we
1907 must adjust the offset. */
1908 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1909 offset = (GET_MODE_SIZE (is_mode)
1910 - GET_MODE_SIZE (wanted_mode) - offset);
1912 pos %= GET_MODE_BITSIZE (wanted_mode);
1914 newmem = gen_rtx_MEM (wanted_mode,
1915 plus_constant (XEXP (tem, 0), offset));
1916 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1917 MEM_COPY_ATTRIBUTES (newmem, tem);
1919 /* Make the change and see if the insn remains valid. */
1920 INSN_CODE (insn) = -1;
1921 XEXP (x, 0) = newmem;
1922 XEXP (x, 2) = GEN_INT (pos);
1924 if (recog_memoized (insn) >= 0)
1925 return;
1927 /* Otherwise, restore old position. XEXP (x, 0) will be
1928 restored later. */
1929 XEXP (x, 2) = old_pos;
1933 /* If we get here, the bitfield extract insn can't accept a memory
1934 reference. Copy the input into a register. */
1936 tem1 = gen_reg_rtx (GET_MODE (tem));
1937 emit_insn_before (gen_move_insn (tem1, tem), insn);
1938 XEXP (x, 0) = tem1;
1939 return;
1941 break;
1943 case SUBREG:
1944 if (SUBREG_REG (x) == var)
1946 /* If this is a special SUBREG made because VAR was promoted
1947 from a wider mode, replace it with VAR and call ourself
1948 recursively, this time saying that the object previously
1949 had its current mode (by virtue of the SUBREG). */
1951 if (SUBREG_PROMOTED_VAR_P (x))
1953 *loc = var;
1954 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1955 return;
1958 /* If this SUBREG makes VAR wider, it has become a paradoxical
1959 SUBREG with VAR in memory, but these aren't allowed at this
1960 stage of the compilation. So load VAR into a pseudo and take
1961 a SUBREG of that pseudo. */
1962 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1964 replacement = find_fixup_replacement (replacements, var);
1965 if (replacement->new == 0)
1966 replacement->new = gen_reg_rtx (GET_MODE (var));
1967 SUBREG_REG (x) = replacement->new;
1968 return;
1971 /* See if we have already found a replacement for this SUBREG.
1972 If so, use it. Otherwise, make a MEM and see if the insn
1973 is recognized. If not, or if we should force MEM into a register,
1974 make a pseudo for this SUBREG. */
1975 replacement = find_fixup_replacement (replacements, x);
1976 if (replacement->new)
1978 *loc = replacement->new;
1979 return;
1982 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1984 INSN_CODE (insn) = -1;
1985 if (! flag_force_mem && recog_memoized (insn) >= 0)
1986 return;
1988 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1989 return;
1991 break;
1993 case SET:
1994 /* First do special simplification of bit-field references. */
1995 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1996 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1997 optimize_bit_field (x, insn, 0);
1998 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1999 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2000 optimize_bit_field (x, insn, NULL_PTR);
2002 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2003 into a register and then store it back out. */
2004 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2005 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2006 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2007 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2008 > GET_MODE_SIZE (GET_MODE (var))))
2010 replacement = find_fixup_replacement (replacements, var);
2011 if (replacement->new == 0)
2012 replacement->new = gen_reg_rtx (GET_MODE (var));
2014 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2015 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2018 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2019 insn into a pseudo and store the low part of the pseudo into VAR. */
2020 if (GET_CODE (SET_DEST (x)) == SUBREG
2021 && SUBREG_REG (SET_DEST (x)) == var
2022 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2023 > GET_MODE_SIZE (GET_MODE (var))))
2025 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2026 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2027 tem)),
2028 insn);
2029 break;
2033 rtx dest = SET_DEST (x);
2034 rtx src = SET_SRC (x);
2035 #ifdef HAVE_insv
2036 rtx outerdest = dest;
2037 #endif
2039 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2040 || GET_CODE (dest) == SIGN_EXTRACT
2041 || GET_CODE (dest) == ZERO_EXTRACT)
2042 dest = XEXP (dest, 0);
2044 if (GET_CODE (src) == SUBREG)
2045 src = XEXP (src, 0);
2047 /* If VAR does not appear at the top level of the SET
2048 just scan the lower levels of the tree. */
2050 if (src != var && dest != var)
2051 break;
2053 /* We will need to rerecognize this insn. */
2054 INSN_CODE (insn) = -1;
2056 #ifdef HAVE_insv
2057 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2059 /* Since this case will return, ensure we fixup all the
2060 operands here. */
2061 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2062 insn, replacements);
2063 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2064 insn, replacements);
2065 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2066 insn, replacements);
2068 tem = XEXP (outerdest, 0);
2070 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2071 that may appear inside a ZERO_EXTRACT.
2072 This was legitimate when the MEM was a REG. */
2073 if (GET_CODE (tem) == SUBREG
2074 && SUBREG_REG (tem) == var)
2075 tem = fixup_memory_subreg (tem, insn, 0);
2076 else
2077 tem = fixup_stack_1 (tem, insn);
2079 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2080 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2081 && ! mode_dependent_address_p (XEXP (tem, 0))
2082 && ! MEM_VOLATILE_P (tem))
2084 enum machine_mode wanted_mode;
2085 enum machine_mode is_mode = GET_MODE (tem);
2086 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2088 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2089 if (wanted_mode == VOIDmode)
2090 wanted_mode = word_mode;
2092 /* If we have a narrower mode, we can do something. */
2093 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2095 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2096 rtx old_pos = XEXP (outerdest, 2);
2097 rtx newmem;
2099 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2100 offset = (GET_MODE_SIZE (is_mode)
2101 - GET_MODE_SIZE (wanted_mode) - offset);
2103 pos %= GET_MODE_BITSIZE (wanted_mode);
2105 newmem = gen_rtx_MEM (wanted_mode,
2106 plus_constant (XEXP (tem, 0),
2107 offset));
2108 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2109 MEM_COPY_ATTRIBUTES (newmem, tem);
2111 /* Make the change and see if the insn remains valid. */
2112 INSN_CODE (insn) = -1;
2113 XEXP (outerdest, 0) = newmem;
2114 XEXP (outerdest, 2) = GEN_INT (pos);
2116 if (recog_memoized (insn) >= 0)
2117 return;
2119 /* Otherwise, restore old position. XEXP (x, 0) will be
2120 restored later. */
2121 XEXP (outerdest, 2) = old_pos;
2125 /* If we get here, the bit-field store doesn't allow memory
2126 or isn't located at a constant position. Load the value into
2127 a register, do the store, and put it back into memory. */
2129 tem1 = gen_reg_rtx (GET_MODE (tem));
2130 emit_insn_before (gen_move_insn (tem1, tem), insn);
2131 emit_insn_after (gen_move_insn (tem, tem1), insn);
2132 XEXP (outerdest, 0) = tem1;
2133 return;
2135 #endif
2137 /* STRICT_LOW_PART is a no-op on memory references
2138 and it can cause combinations to be unrecognizable,
2139 so eliminate it. */
2141 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2142 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2144 /* A valid insn to copy VAR into or out of a register
2145 must be left alone, to avoid an infinite loop here.
2146 If the reference to VAR is by a subreg, fix that up,
2147 since SUBREG is not valid for a memref.
2148 Also fix up the address of the stack slot.
2150 Note that we must not try to recognize the insn until
2151 after we know that we have valid addresses and no
2152 (subreg (mem ...) ...) constructs, since these interfere
2153 with determining the validity of the insn. */
2155 if ((SET_SRC (x) == var
2156 || (GET_CODE (SET_SRC (x)) == SUBREG
2157 && SUBREG_REG (SET_SRC (x)) == var))
2158 && (GET_CODE (SET_DEST (x)) == REG
2159 || (GET_CODE (SET_DEST (x)) == SUBREG
2160 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2161 && GET_MODE (var) == promoted_mode
2162 && x == single_set (insn))
2164 rtx pat;
2166 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2167 if (replacement->new)
2168 SET_SRC (x) = replacement->new;
2169 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2170 SET_SRC (x) = replacement->new
2171 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2172 else
2173 SET_SRC (x) = replacement->new
2174 = fixup_stack_1 (SET_SRC (x), insn);
2176 if (recog_memoized (insn) >= 0)
2177 return;
2179 /* INSN is not valid, but we know that we want to
2180 copy SET_SRC (x) to SET_DEST (x) in some way. So
2181 we generate the move and see whether it requires more
2182 than one insn. If it does, we emit those insns and
2183 delete INSN. Otherwise, we an just replace the pattern
2184 of INSN; we have already verified above that INSN has
2185 no other function that to do X. */
2187 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2188 if (GET_CODE (pat) == SEQUENCE)
2190 emit_insn_after (pat, insn);
2191 PUT_CODE (insn, NOTE);
2192 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2193 NOTE_SOURCE_FILE (insn) = 0;
2195 else
2196 PATTERN (insn) = pat;
2198 return;
2201 if ((SET_DEST (x) == var
2202 || (GET_CODE (SET_DEST (x)) == SUBREG
2203 && SUBREG_REG (SET_DEST (x)) == var))
2204 && (GET_CODE (SET_SRC (x)) == REG
2205 || (GET_CODE (SET_SRC (x)) == SUBREG
2206 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2207 && GET_MODE (var) == promoted_mode
2208 && x == single_set (insn))
2210 rtx pat;
2212 if (GET_CODE (SET_DEST (x)) == SUBREG)
2213 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2214 else
2215 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2217 if (recog_memoized (insn) >= 0)
2218 return;
2220 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2221 if (GET_CODE (pat) == SEQUENCE)
2223 emit_insn_after (pat, insn);
2224 PUT_CODE (insn, NOTE);
2225 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2226 NOTE_SOURCE_FILE (insn) = 0;
2228 else
2229 PATTERN (insn) = pat;
2231 return;
2234 /* Otherwise, storing into VAR must be handled specially
2235 by storing into a temporary and copying that into VAR
2236 with a new insn after this one. Note that this case
2237 will be used when storing into a promoted scalar since
2238 the insn will now have different modes on the input
2239 and output and hence will be invalid (except for the case
2240 of setting it to a constant, which does not need any
2241 change if it is valid). We generate extra code in that case,
2242 but combine.c will eliminate it. */
2244 if (dest == var)
2246 rtx temp;
2247 rtx fixeddest = SET_DEST (x);
2249 /* STRICT_LOW_PART can be discarded, around a MEM. */
2250 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2251 fixeddest = XEXP (fixeddest, 0);
2252 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2253 if (GET_CODE (fixeddest) == SUBREG)
2255 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2256 promoted_mode = GET_MODE (fixeddest);
2258 else
2259 fixeddest = fixup_stack_1 (fixeddest, insn);
2261 temp = gen_reg_rtx (promoted_mode);
2263 emit_insn_after (gen_move_insn (fixeddest,
2264 gen_lowpart (GET_MODE (fixeddest),
2265 temp)),
2266 insn);
2268 SET_DEST (x) = temp;
2272 default:
2273 break;
2276 /* Nothing special about this RTX; fix its operands. */
2278 fmt = GET_RTX_FORMAT (code);
2279 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2281 if (fmt[i] == 'e')
2282 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2283 if (fmt[i] == 'E')
2285 register int j;
2286 for (j = 0; j < XVECLEN (x, i); j++)
2287 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2288 insn, replacements);
2293 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2294 return an rtx (MEM:m1 newaddr) which is equivalent.
2295 If any insns must be emitted to compute NEWADDR, put them before INSN.
2297 UNCRITICAL nonzero means accept paradoxical subregs.
2298 This is used for subregs found inside REG_NOTES. */
2300 static rtx
2301 fixup_memory_subreg (x, insn, uncritical)
2302 rtx x;
2303 rtx insn;
2304 int uncritical;
2306 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2307 rtx addr = XEXP (SUBREG_REG (x), 0);
2308 enum machine_mode mode = GET_MODE (x);
2309 rtx result;
2311 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2312 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2313 && ! uncritical)
2314 abort ();
2316 if (BYTES_BIG_ENDIAN)
2317 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2318 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2319 addr = plus_constant (addr, offset);
2320 if (!flag_force_addr && memory_address_p (mode, addr))
2321 /* Shortcut if no insns need be emitted. */
2322 return change_address (SUBREG_REG (x), mode, addr);
2323 start_sequence ();
2324 result = change_address (SUBREG_REG (x), mode, addr);
2325 emit_insn_before (gen_sequence (), insn);
2326 end_sequence ();
2327 return result;
2330 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2331 Replace subexpressions of X in place.
2332 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2333 Otherwise return X, with its contents possibly altered.
2335 If any insns must be emitted to compute NEWADDR, put them before INSN.
2337 UNCRITICAL is as in fixup_memory_subreg. */
2339 static rtx
2340 walk_fixup_memory_subreg (x, insn, uncritical)
2341 register rtx x;
2342 rtx insn;
2343 int uncritical;
2345 register enum rtx_code code;
2346 register const char *fmt;
2347 register int i;
2349 if (x == 0)
2350 return 0;
2352 code = GET_CODE (x);
2354 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2355 return fixup_memory_subreg (x, insn, uncritical);
2357 /* Nothing special about this RTX; fix its operands. */
2359 fmt = GET_RTX_FORMAT (code);
2360 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2362 if (fmt[i] == 'e')
2363 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2364 if (fmt[i] == 'E')
2366 register int j;
2367 for (j = 0; j < XVECLEN (x, i); j++)
2368 XVECEXP (x, i, j)
2369 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2372 return x;
2375 /* For each memory ref within X, if it refers to a stack slot
2376 with an out of range displacement, put the address in a temp register
2377 (emitting new insns before INSN to load these registers)
2378 and alter the memory ref to use that register.
2379 Replace each such MEM rtx with a copy, to avoid clobberage. */
2381 static rtx
2382 fixup_stack_1 (x, insn)
2383 rtx x;
2384 rtx insn;
2386 register int i;
2387 register RTX_CODE code = GET_CODE (x);
2388 register const char *fmt;
2390 if (code == MEM)
2392 register rtx ad = XEXP (x, 0);
2393 /* If we have address of a stack slot but it's not valid
2394 (displacement is too large), compute the sum in a register. */
2395 if (GET_CODE (ad) == PLUS
2396 && GET_CODE (XEXP (ad, 0)) == REG
2397 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2398 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2399 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2400 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2401 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2402 #endif
2403 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2404 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2405 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2406 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2408 rtx temp, seq;
2409 if (memory_address_p (GET_MODE (x), ad))
2410 return x;
2412 start_sequence ();
2413 temp = copy_to_reg (ad);
2414 seq = gen_sequence ();
2415 end_sequence ();
2416 emit_insn_before (seq, insn);
2417 return change_address (x, VOIDmode, temp);
2419 return x;
2422 fmt = GET_RTX_FORMAT (code);
2423 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2425 if (fmt[i] == 'e')
2426 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2427 if (fmt[i] == 'E')
2429 register int j;
2430 for (j = 0; j < XVECLEN (x, i); j++)
2431 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2434 return x;
2437 /* Optimization: a bit-field instruction whose field
2438 happens to be a byte or halfword in memory
2439 can be changed to a move instruction.
2441 We call here when INSN is an insn to examine or store into a bit-field.
2442 BODY is the SET-rtx to be altered.
2444 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2445 (Currently this is called only from function.c, and EQUIV_MEM
2446 is always 0.) */
2448 static void
2449 optimize_bit_field (body, insn, equiv_mem)
2450 rtx body;
2451 rtx insn;
2452 rtx *equiv_mem;
2454 register rtx bitfield;
2455 int destflag;
2456 rtx seq = 0;
2457 enum machine_mode mode;
2459 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2460 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2461 bitfield = SET_DEST (body), destflag = 1;
2462 else
2463 bitfield = SET_SRC (body), destflag = 0;
2465 /* First check that the field being stored has constant size and position
2466 and is in fact a byte or halfword suitably aligned. */
2468 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2469 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2470 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2471 != BLKmode)
2472 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2474 register rtx memref = 0;
2476 /* Now check that the containing word is memory, not a register,
2477 and that it is safe to change the machine mode. */
2479 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2480 memref = XEXP (bitfield, 0);
2481 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2482 && equiv_mem != 0)
2483 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2484 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2485 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2486 memref = SUBREG_REG (XEXP (bitfield, 0));
2487 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2488 && equiv_mem != 0
2489 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2490 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2492 if (memref
2493 && ! mode_dependent_address_p (XEXP (memref, 0))
2494 && ! MEM_VOLATILE_P (memref))
2496 /* Now adjust the address, first for any subreg'ing
2497 that we are now getting rid of,
2498 and then for which byte of the word is wanted. */
2500 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2501 rtx insns;
2503 /* Adjust OFFSET to count bits from low-address byte. */
2504 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2505 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2506 - offset - INTVAL (XEXP (bitfield, 1)));
2508 /* Adjust OFFSET to count bytes from low-address byte. */
2509 offset /= BITS_PER_UNIT;
2510 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2512 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2513 if (BYTES_BIG_ENDIAN)
2514 offset -= (MIN (UNITS_PER_WORD,
2515 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2516 - MIN (UNITS_PER_WORD,
2517 GET_MODE_SIZE (GET_MODE (memref))));
2520 start_sequence ();
2521 memref = change_address (memref, mode,
2522 plus_constant (XEXP (memref, 0), offset));
2523 insns = get_insns ();
2524 end_sequence ();
2525 emit_insns_before (insns, insn);
2527 /* Store this memory reference where
2528 we found the bit field reference. */
2530 if (destflag)
2532 validate_change (insn, &SET_DEST (body), memref, 1);
2533 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2535 rtx src = SET_SRC (body);
2536 while (GET_CODE (src) == SUBREG
2537 && SUBREG_WORD (src) == 0)
2538 src = SUBREG_REG (src);
2539 if (GET_MODE (src) != GET_MODE (memref))
2540 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2541 validate_change (insn, &SET_SRC (body), src, 1);
2543 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2544 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2545 /* This shouldn't happen because anything that didn't have
2546 one of these modes should have got converted explicitly
2547 and then referenced through a subreg.
2548 This is so because the original bit-field was
2549 handled by agg_mode and so its tree structure had
2550 the same mode that memref now has. */
2551 abort ();
2553 else
2555 rtx dest = SET_DEST (body);
2557 while (GET_CODE (dest) == SUBREG
2558 && SUBREG_WORD (dest) == 0
2559 && (GET_MODE_CLASS (GET_MODE (dest))
2560 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2561 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2562 <= UNITS_PER_WORD))
2563 dest = SUBREG_REG (dest);
2565 validate_change (insn, &SET_DEST (body), dest, 1);
2567 if (GET_MODE (dest) == GET_MODE (memref))
2568 validate_change (insn, &SET_SRC (body), memref, 1);
2569 else
2571 /* Convert the mem ref to the destination mode. */
2572 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2574 start_sequence ();
2575 convert_move (newreg, memref,
2576 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2577 seq = get_insns ();
2578 end_sequence ();
2580 validate_change (insn, &SET_SRC (body), newreg, 1);
2584 /* See if we can convert this extraction or insertion into
2585 a simple move insn. We might not be able to do so if this
2586 was, for example, part of a PARALLEL.
2588 If we succeed, write out any needed conversions. If we fail,
2589 it is hard to guess why we failed, so don't do anything
2590 special; just let the optimization be suppressed. */
2592 if (apply_change_group () && seq)
2593 emit_insns_before (seq, insn);
2598 /* These routines are responsible for converting virtual register references
2599 to the actual hard register references once RTL generation is complete.
2601 The following four variables are used for communication between the
2602 routines. They contain the offsets of the virtual registers from their
2603 respective hard registers. */
2605 static int in_arg_offset;
2606 static int var_offset;
2607 static int dynamic_offset;
2608 static int out_arg_offset;
2609 static int cfa_offset;
2611 /* In most machines, the stack pointer register is equivalent to the bottom
2612 of the stack. */
2614 #ifndef STACK_POINTER_OFFSET
2615 #define STACK_POINTER_OFFSET 0
2616 #endif
2618 /* If not defined, pick an appropriate default for the offset of dynamically
2619 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2620 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2622 #ifndef STACK_DYNAMIC_OFFSET
2624 #ifdef ACCUMULATE_OUTGOING_ARGS
2625 /* The bottom of the stack points to the actual arguments. If
2626 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2627 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2628 stack space for register parameters is not pushed by the caller, but
2629 rather part of the fixed stack areas and hence not included in
2630 `current_function_outgoing_args_size'. Nevertheless, we must allow
2631 for it when allocating stack dynamic objects. */
2633 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2634 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2635 (current_function_outgoing_args_size \
2636 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2638 #else
2639 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2640 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2641 #endif
2643 #else
2644 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2645 #endif
2646 #endif
2648 /* On a few machines, the CFA coincides with the arg pointer. */
2650 #ifndef ARG_POINTER_CFA_OFFSET
2651 #define ARG_POINTER_CFA_OFFSET 0
2652 #endif
2655 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2656 its address taken. DECL is the decl for the object stored in the
2657 register, for later use if we do need to force REG into the stack.
2658 REG is overwritten by the MEM like in put_reg_into_stack. */
2661 gen_mem_addressof (reg, decl)
2662 rtx reg;
2663 tree decl;
2665 tree type = TREE_TYPE (decl);
2666 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2667 REGNO (reg), decl);
2668 /* If the original REG was a user-variable, then so is the REG whose
2669 address is being taken. */
2670 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2672 PUT_CODE (reg, MEM);
2673 PUT_MODE (reg, DECL_MODE (decl));
2674 XEXP (reg, 0) = r;
2675 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2676 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2677 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2679 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2680 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2682 return reg;
2685 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2687 #if 0
2688 void
2689 flush_addressof (decl)
2690 tree decl;
2692 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2693 && DECL_RTL (decl) != 0
2694 && GET_CODE (DECL_RTL (decl)) == MEM
2695 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2696 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2697 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2699 #endif
2701 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2703 static void
2704 put_addressof_into_stack (r, ht)
2705 rtx r;
2706 struct hash_table *ht;
2708 tree decl = ADDRESSOF_DECL (r);
2709 rtx reg = XEXP (r, 0);
2711 if (GET_CODE (reg) != REG)
2712 abort ();
2714 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2715 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2716 ADDRESSOF_REGNO (r),
2717 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2720 /* List of replacements made below in purge_addressof_1 when creating
2721 bitfield insertions. */
2722 static rtx purge_bitfield_addressof_replacements;
2724 /* List of replacements made below in purge_addressof_1 for patterns
2725 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2726 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
2727 the all pattern. List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2728 enough in complex cases, e.g. when some field values can be
2729 extracted by usage MEM with narrower mode. */
2730 static rtx purge_addressof_replacements;
2732 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2733 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2734 the stack. */
2736 static void
2737 purge_addressof_1 (loc, insn, force, store, ht)
2738 rtx *loc;
2739 rtx insn;
2740 int force, store;
2741 struct hash_table *ht;
2743 rtx x;
2744 RTX_CODE code;
2745 int i, j;
2746 const char *fmt;
2748 /* Re-start here to avoid recursion in common cases. */
2749 restart:
2751 x = *loc;
2752 if (x == 0)
2753 return;
2755 code = GET_CODE (x);
2757 /* If we don't return in any of the cases below, we will recurse inside
2758 the RTX, which will normally result in any ADDRESSOF being forced into
2759 memory. */
2760 if (code == SET)
2762 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2763 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2764 return;
2767 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2769 /* We must create a copy of the rtx because it was created by
2770 overwriting a REG rtx which is always shared. */
2771 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2772 rtx insns;
2774 if (validate_change (insn, loc, sub, 0)
2775 || validate_replace_rtx (x, sub, insn))
2776 return;
2778 start_sequence ();
2779 sub = force_operand (sub, NULL_RTX);
2780 if (! validate_change (insn, loc, sub, 0)
2781 && ! validate_replace_rtx (x, sub, insn))
2782 abort ();
2784 insns = gen_sequence ();
2785 end_sequence ();
2786 emit_insn_before (insns, insn);
2787 return;
2790 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2792 rtx sub = XEXP (XEXP (x, 0), 0);
2793 rtx sub2;
2795 if (GET_CODE (sub) == MEM)
2797 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2798 MEM_COPY_ATTRIBUTES (sub2, sub);
2799 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2800 sub = sub2;
2802 else if (GET_CODE (sub) == REG
2803 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2805 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2807 int size_x, size_sub;
2809 if (!insn)
2811 /* When processing REG_NOTES look at the list of
2812 replacements done on the insn to find the register that X
2813 was replaced by. */
2814 rtx tem;
2816 for (tem = purge_bitfield_addressof_replacements;
2817 tem != NULL_RTX;
2818 tem = XEXP (XEXP (tem, 1), 1))
2819 if (rtx_equal_p (x, XEXP (tem, 0)))
2821 *loc = XEXP (XEXP (tem, 1), 0);
2822 return;
2825 /* See comment for purge_addressof_replacements. */
2826 for (tem = purge_addressof_replacements;
2827 tem != NULL_RTX;
2828 tem = XEXP (XEXP (tem, 1), 1))
2829 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2831 rtx z = XEXP (XEXP (tem, 1), 0);
2833 if (GET_MODE (x) == GET_MODE (z)
2834 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2835 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2836 abort ();
2838 /* It can happen that the note may speak of things
2839 in a wider (or just different) mode than the
2840 code did. This is especially true of
2841 REG_RETVAL. */
2843 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2844 z = SUBREG_REG (z);
2846 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2847 && (GET_MODE_SIZE (GET_MODE (x))
2848 > GET_MODE_SIZE (GET_MODE (z))))
2850 /* This can occur as a result in invalid
2851 pointer casts, e.g. float f; ...
2852 *(long long int *)&f.
2853 ??? We could emit a warning here, but
2854 without a line number that wouldn't be
2855 very helpful. */
2856 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2858 else
2859 z = gen_lowpart (GET_MODE (x), z);
2861 *loc = z;
2862 return;
2865 /* There should always be such a replacement. */
2866 abort ();
2869 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2870 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2872 /* Don't even consider working with paradoxical subregs,
2873 or the moral equivalent seen here. */
2874 if (size_x <= size_sub
2875 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2877 /* Do a bitfield insertion to mirror what would happen
2878 in memory. */
2880 rtx val, seq;
2882 if (store)
2884 rtx p = PREV_INSN (insn);
2886 start_sequence ();
2887 val = gen_reg_rtx (GET_MODE (x));
2888 if (! validate_change (insn, loc, val, 0))
2890 /* Discard the current sequence and put the
2891 ADDRESSOF on stack. */
2892 end_sequence ();
2893 goto give_up;
2895 seq = gen_sequence ();
2896 end_sequence ();
2897 emit_insn_before (seq, insn);
2898 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2899 insn, ht);
2901 start_sequence ();
2902 store_bit_field (sub, size_x, 0, GET_MODE (x),
2903 val, GET_MODE_SIZE (GET_MODE (sub)),
2904 GET_MODE_SIZE (GET_MODE (sub)));
2906 /* Make sure to unshare any shared rtl that store_bit_field
2907 might have created. */
2908 for (p = get_insns(); p; p = NEXT_INSN (p))
2910 reset_used_flags (PATTERN (p));
2911 reset_used_flags (REG_NOTES (p));
2912 reset_used_flags (LOG_LINKS (p));
2914 unshare_all_rtl (get_insns ());
2916 seq = gen_sequence ();
2917 end_sequence ();
2918 p = emit_insn_after (seq, insn);
2919 if (NEXT_INSN (insn))
2920 compute_insns_for_mem (NEXT_INSN (insn),
2921 p ? NEXT_INSN (p) : NULL_RTX,
2922 ht);
2924 else
2926 rtx p = PREV_INSN (insn);
2928 start_sequence ();
2929 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2930 GET_MODE (x), GET_MODE (x),
2931 GET_MODE_SIZE (GET_MODE (sub)),
2932 GET_MODE_SIZE (GET_MODE (sub)));
2934 if (! validate_change (insn, loc, val, 0))
2936 /* Discard the current sequence and put the
2937 ADDRESSOF on stack. */
2938 end_sequence ();
2939 goto give_up;
2942 seq = gen_sequence ();
2943 end_sequence ();
2944 emit_insn_before (seq, insn);
2945 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2946 insn, ht);
2949 /* Remember the replacement so that the same one can be done
2950 on the REG_NOTES. */
2951 purge_bitfield_addressof_replacements
2952 = gen_rtx_EXPR_LIST (VOIDmode, x,
2953 gen_rtx_EXPR_LIST
2954 (VOIDmode, val,
2955 purge_bitfield_addressof_replacements));
2957 /* We replaced with a reg -- all done. */
2958 return;
2962 else if (validate_change (insn, loc, sub, 0))
2964 /* Remember the replacement so that the same one can be done
2965 on the REG_NOTES. */
2966 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
2968 rtx tem;
2970 for (tem = purge_addressof_replacements;
2971 tem != NULL_RTX;
2972 tem = XEXP (XEXP (tem, 1), 1))
2973 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2975 XEXP (XEXP (tem, 1), 0) = sub;
2976 return;
2978 purge_addressof_replacements
2979 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
2980 gen_rtx_EXPR_LIST (VOIDmode, sub,
2981 purge_addressof_replacements));
2982 return;
2984 goto restart;
2986 give_up:;
2987 /* else give up and put it into the stack */
2990 else if (code == ADDRESSOF)
2992 put_addressof_into_stack (x, ht);
2993 return;
2995 else if (code == SET)
2997 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2998 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2999 return;
3002 /* Scan all subexpressions. */
3003 fmt = GET_RTX_FORMAT (code);
3004 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3006 if (*fmt == 'e')
3007 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3008 else if (*fmt == 'E')
3009 for (j = 0; j < XVECLEN (x, i); j++)
3010 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3014 /* Return a new hash table entry in HT. */
3016 static struct hash_entry *
3017 insns_for_mem_newfunc (he, ht, k)
3018 struct hash_entry *he;
3019 struct hash_table *ht;
3020 hash_table_key k ATTRIBUTE_UNUSED;
3022 struct insns_for_mem_entry *ifmhe;
3023 if (he)
3024 return he;
3026 ifmhe = ((struct insns_for_mem_entry *)
3027 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3028 ifmhe->insns = NULL_RTX;
3030 return &ifmhe->he;
3033 /* Return a hash value for K, a REG. */
3035 static unsigned long
3036 insns_for_mem_hash (k)
3037 hash_table_key k;
3039 /* K is really a RTX. Just use the address as the hash value. */
3040 return (unsigned long) k;
3043 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3045 static boolean
3046 insns_for_mem_comp (k1, k2)
3047 hash_table_key k1;
3048 hash_table_key k2;
3050 return k1 == k2;
3053 struct insns_for_mem_walk_info {
3054 /* The hash table that we are using to record which INSNs use which
3055 MEMs. */
3056 struct hash_table *ht;
3058 /* The INSN we are currently proessing. */
3059 rtx insn;
3061 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3062 to find the insns that use the REGs in the ADDRESSOFs. */
3063 int pass;
3066 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3067 that might be used in an ADDRESSOF expression, record this INSN in
3068 the hash table given by DATA (which is really a pointer to an
3069 insns_for_mem_walk_info structure). */
3071 static int
3072 insns_for_mem_walk (r, data)
3073 rtx *r;
3074 void *data;
3076 struct insns_for_mem_walk_info *ifmwi
3077 = (struct insns_for_mem_walk_info *) data;
3079 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3080 && GET_CODE (XEXP (*r, 0)) == REG)
3081 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3082 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3084 /* Lookup this MEM in the hashtable, creating it if necessary. */
3085 struct insns_for_mem_entry *ifme
3086 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3088 /*create=*/0,
3089 /*copy=*/0);
3091 /* If we have not already recorded this INSN, do so now. Since
3092 we process the INSNs in order, we know that if we have
3093 recorded it it must be at the front of the list. */
3094 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3096 /* We do the allocation on the same obstack as is used for
3097 the hash table since this memory will not be used once
3098 the hash table is deallocated. */
3099 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3100 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3101 ifme->insns);
3102 pop_obstacks ();
3106 return 0;
3109 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3110 which REGs in HT. */
3112 static void
3113 compute_insns_for_mem (insns, last_insn, ht)
3114 rtx insns;
3115 rtx last_insn;
3116 struct hash_table *ht;
3118 rtx insn;
3119 struct insns_for_mem_walk_info ifmwi;
3120 ifmwi.ht = ht;
3122 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3123 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3124 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3126 ifmwi.insn = insn;
3127 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3131 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3132 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3133 stack. */
3135 void
3136 purge_addressof (insns)
3137 rtx insns;
3139 rtx insn;
3140 struct hash_table ht;
3142 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3143 requires a fixup pass over the instruction stream to correct
3144 INSNs that depended on the REG being a REG, and not a MEM. But,
3145 these fixup passes are slow. Furthermore, more MEMs are not
3146 mentioned in very many instructions. So, we speed up the process
3147 by pre-calculating which REGs occur in which INSNs; that allows
3148 us to perform the fixup passes much more quickly. */
3149 hash_table_init (&ht,
3150 insns_for_mem_newfunc,
3151 insns_for_mem_hash,
3152 insns_for_mem_comp);
3153 compute_insns_for_mem (insns, NULL_RTX, &ht);
3155 for (insn = insns; insn; insn = NEXT_INSN (insn))
3156 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3157 || GET_CODE (insn) == CALL_INSN)
3159 purge_addressof_1 (&PATTERN (insn), insn,
3160 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3161 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht);
3164 /* Clean up. */
3165 hash_table_free (&ht);
3166 purge_bitfield_addressof_replacements = 0;
3167 purge_addressof_replacements = 0;
3170 /* Pass through the INSNS of function FNDECL and convert virtual register
3171 references to hard register references. */
3173 void
3174 instantiate_virtual_regs (fndecl, insns)
3175 tree fndecl;
3176 rtx insns;
3178 rtx insn;
3179 int i;
3181 /* Compute the offsets to use for this function. */
3182 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3183 var_offset = STARTING_FRAME_OFFSET;
3184 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3185 out_arg_offset = STACK_POINTER_OFFSET;
3186 cfa_offset = ARG_POINTER_CFA_OFFSET;
3188 /* Scan all variables and parameters of this function. For each that is
3189 in memory, instantiate all virtual registers if the result is a valid
3190 address. If not, we do it later. That will handle most uses of virtual
3191 regs on many machines. */
3192 instantiate_decls (fndecl, 1);
3194 /* Initialize recognition, indicating that volatile is OK. */
3195 init_recog ();
3197 /* Scan through all the insns, instantiating every virtual register still
3198 present. */
3199 for (insn = insns; insn; insn = NEXT_INSN (insn))
3200 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3201 || GET_CODE (insn) == CALL_INSN)
3203 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3204 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3207 /* Instantiate the stack slots for the parm registers, for later use in
3208 addressof elimination. */
3209 for (i = 0; i < max_parm_reg; ++i)
3210 if (parm_reg_stack_loc[i])
3211 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3213 /* Now instantiate the remaining register equivalences for debugging info.
3214 These will not be valid addresses. */
3215 instantiate_decls (fndecl, 0);
3217 /* Indicate that, from now on, assign_stack_local should use
3218 frame_pointer_rtx. */
3219 virtuals_instantiated = 1;
3222 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3223 all virtual registers in their DECL_RTL's.
3225 If VALID_ONLY, do this only if the resulting address is still valid.
3226 Otherwise, always do it. */
3228 static void
3229 instantiate_decls (fndecl, valid_only)
3230 tree fndecl;
3231 int valid_only;
3233 tree decl;
3235 if (DECL_SAVED_INSNS (fndecl))
3236 /* When compiling an inline function, the obstack used for
3237 rtl allocation is the maybepermanent_obstack. Calling
3238 `resume_temporary_allocation' switches us back to that
3239 obstack while we process this function's parameters. */
3240 resume_temporary_allocation ();
3242 /* Process all parameters of the function. */
3243 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3245 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3247 instantiate_decl (DECL_RTL (decl), size, valid_only);
3249 /* If the parameter was promoted, then the incoming RTL mode may be
3250 larger than the declared type size. We must use the larger of
3251 the two sizes. */
3252 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3253 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3256 /* Now process all variables defined in the function or its subblocks. */
3257 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3259 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3261 /* Save all rtl allocated for this function by raising the
3262 high-water mark on the maybepermanent_obstack. */
3263 preserve_data ();
3264 /* All further rtl allocation is now done in the current_obstack. */
3265 rtl_in_current_obstack ();
3269 /* Subroutine of instantiate_decls: Process all decls in the given
3270 BLOCK node and all its subblocks. */
3272 static void
3273 instantiate_decls_1 (let, valid_only)
3274 tree let;
3275 int valid_only;
3277 tree t;
3279 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3280 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3281 valid_only);
3283 /* Process all subblocks. */
3284 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3285 instantiate_decls_1 (t, valid_only);
3288 /* Subroutine of the preceding procedures: Given RTL representing a
3289 decl and the size of the object, do any instantiation required.
3291 If VALID_ONLY is non-zero, it means that the RTL should only be
3292 changed if the new address is valid. */
3294 static void
3295 instantiate_decl (x, size, valid_only)
3296 rtx x;
3297 int size;
3298 int valid_only;
3300 enum machine_mode mode;
3301 rtx addr;
3303 /* If this is not a MEM, no need to do anything. Similarly if the
3304 address is a constant or a register that is not a virtual register. */
3306 if (x == 0 || GET_CODE (x) != MEM)
3307 return;
3309 addr = XEXP (x, 0);
3310 if (CONSTANT_P (addr)
3311 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3312 || (GET_CODE (addr) == REG
3313 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3314 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3315 return;
3317 /* If we should only do this if the address is valid, copy the address.
3318 We need to do this so we can undo any changes that might make the
3319 address invalid. This copy is unfortunate, but probably can't be
3320 avoided. */
3322 if (valid_only)
3323 addr = copy_rtx (addr);
3325 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3327 if (valid_only)
3329 /* Now verify that the resulting address is valid for every integer or
3330 floating-point mode up to and including SIZE bytes long. We do this
3331 since the object might be accessed in any mode and frame addresses
3332 are shared. */
3334 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3335 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3336 mode = GET_MODE_WIDER_MODE (mode))
3337 if (! memory_address_p (mode, addr))
3338 return;
3340 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3341 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3342 mode = GET_MODE_WIDER_MODE (mode))
3343 if (! memory_address_p (mode, addr))
3344 return;
3347 /* Put back the address now that we have updated it and we either know
3348 it is valid or we don't care whether it is valid. */
3350 XEXP (x, 0) = addr;
3353 /* Given a pointer to a piece of rtx and an optional pointer to the
3354 containing object, instantiate any virtual registers present in it.
3356 If EXTRA_INSNS, we always do the replacement and generate
3357 any extra insns before OBJECT. If it zero, we do nothing if replacement
3358 is not valid.
3360 Return 1 if we either had nothing to do or if we were able to do the
3361 needed replacement. Return 0 otherwise; we only return zero if
3362 EXTRA_INSNS is zero.
3364 We first try some simple transformations to avoid the creation of extra
3365 pseudos. */
3367 static int
3368 instantiate_virtual_regs_1 (loc, object, extra_insns)
3369 rtx *loc;
3370 rtx object;
3371 int extra_insns;
3373 rtx x;
3374 RTX_CODE code;
3375 rtx new = 0;
3376 HOST_WIDE_INT offset = 0;
3377 rtx temp;
3378 rtx seq;
3379 int i, j;
3380 const char *fmt;
3382 /* Re-start here to avoid recursion in common cases. */
3383 restart:
3385 x = *loc;
3386 if (x == 0)
3387 return 1;
3389 code = GET_CODE (x);
3391 /* Check for some special cases. */
3392 switch (code)
3394 case CONST_INT:
3395 case CONST_DOUBLE:
3396 case CONST:
3397 case SYMBOL_REF:
3398 case CODE_LABEL:
3399 case PC:
3400 case CC0:
3401 case ASM_INPUT:
3402 case ADDR_VEC:
3403 case ADDR_DIFF_VEC:
3404 case RETURN:
3405 return 1;
3407 case SET:
3408 /* We are allowed to set the virtual registers. This means that
3409 the actual register should receive the source minus the
3410 appropriate offset. This is used, for example, in the handling
3411 of non-local gotos. */
3412 if (SET_DEST (x) == virtual_incoming_args_rtx)
3413 new = arg_pointer_rtx, offset = - in_arg_offset;
3414 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3415 new = frame_pointer_rtx, offset = - var_offset;
3416 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3417 new = stack_pointer_rtx, offset = - dynamic_offset;
3418 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3419 new = stack_pointer_rtx, offset = - out_arg_offset;
3420 else if (SET_DEST (x) == virtual_cfa_rtx)
3421 new = arg_pointer_rtx, offset = - cfa_offset;
3423 if (new)
3425 /* The only valid sources here are PLUS or REG. Just do
3426 the simplest possible thing to handle them. */
3427 if (GET_CODE (SET_SRC (x)) != REG
3428 && GET_CODE (SET_SRC (x)) != PLUS)
3429 abort ();
3431 start_sequence ();
3432 if (GET_CODE (SET_SRC (x)) != REG)
3433 temp = force_operand (SET_SRC (x), NULL_RTX);
3434 else
3435 temp = SET_SRC (x);
3436 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3437 seq = get_insns ();
3438 end_sequence ();
3440 emit_insns_before (seq, object);
3441 SET_DEST (x) = new;
3443 if (! validate_change (object, &SET_SRC (x), temp, 0)
3444 || ! extra_insns)
3445 abort ();
3447 return 1;
3450 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3451 loc = &SET_SRC (x);
3452 goto restart;
3454 case PLUS:
3455 /* Handle special case of virtual register plus constant. */
3456 if (CONSTANT_P (XEXP (x, 1)))
3458 rtx old, new_offset;
3460 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3461 if (GET_CODE (XEXP (x, 0)) == PLUS)
3463 rtx inner = XEXP (XEXP (x, 0), 0);
3465 if (inner == virtual_incoming_args_rtx)
3466 new = arg_pointer_rtx, offset = in_arg_offset;
3467 else if (inner == virtual_stack_vars_rtx)
3468 new = frame_pointer_rtx, offset = var_offset;
3469 else if (inner == virtual_stack_dynamic_rtx)
3470 new = stack_pointer_rtx, offset = dynamic_offset;
3471 else if (inner == virtual_outgoing_args_rtx)
3472 new = stack_pointer_rtx, offset = out_arg_offset;
3473 else if (inner == virtual_cfa_rtx)
3474 new = arg_pointer_rtx, offset = cfa_offset;
3475 else
3477 loc = &XEXP (x, 0);
3478 goto restart;
3481 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3482 extra_insns);
3483 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3486 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3487 new = arg_pointer_rtx, offset = in_arg_offset;
3488 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3489 new = frame_pointer_rtx, offset = var_offset;
3490 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3491 new = stack_pointer_rtx, offset = dynamic_offset;
3492 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3493 new = stack_pointer_rtx, offset = out_arg_offset;
3494 else if (XEXP (x, 0) == virtual_cfa_rtx)
3495 new = arg_pointer_rtx, offset = cfa_offset;
3496 else
3498 /* We know the second operand is a constant. Unless the
3499 first operand is a REG (which has been already checked),
3500 it needs to be checked. */
3501 if (GET_CODE (XEXP (x, 0)) != REG)
3503 loc = &XEXP (x, 0);
3504 goto restart;
3506 return 1;
3509 new_offset = plus_constant (XEXP (x, 1), offset);
3511 /* If the new constant is zero, try to replace the sum with just
3512 the register. */
3513 if (new_offset == const0_rtx
3514 && validate_change (object, loc, new, 0))
3515 return 1;
3517 /* Next try to replace the register and new offset.
3518 There are two changes to validate here and we can't assume that
3519 in the case of old offset equals new just changing the register
3520 will yield a valid insn. In the interests of a little efficiency,
3521 however, we only call validate change once (we don't queue up the
3522 changes and then call apply_change_group). */
3524 old = XEXP (x, 0);
3525 if (offset == 0
3526 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3527 : (XEXP (x, 0) = new,
3528 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3530 if (! extra_insns)
3532 XEXP (x, 0) = old;
3533 return 0;
3536 /* Otherwise copy the new constant into a register and replace
3537 constant with that register. */
3538 temp = gen_reg_rtx (Pmode);
3539 XEXP (x, 0) = new;
3540 if (validate_change (object, &XEXP (x, 1), temp, 0))
3541 emit_insn_before (gen_move_insn (temp, new_offset), object);
3542 else
3544 /* If that didn't work, replace this expression with a
3545 register containing the sum. */
3547 XEXP (x, 0) = old;
3548 new = gen_rtx_PLUS (Pmode, new, new_offset);
3550 start_sequence ();
3551 temp = force_operand (new, NULL_RTX);
3552 seq = get_insns ();
3553 end_sequence ();
3555 emit_insns_before (seq, object);
3556 if (! validate_change (object, loc, temp, 0)
3557 && ! validate_replace_rtx (x, temp, object))
3558 abort ();
3562 return 1;
3565 /* Fall through to generic two-operand expression case. */
3566 case EXPR_LIST:
3567 case CALL:
3568 case COMPARE:
3569 case MINUS:
3570 case MULT:
3571 case DIV: case UDIV:
3572 case MOD: case UMOD:
3573 case AND: case IOR: case XOR:
3574 case ROTATERT: case ROTATE:
3575 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3576 case NE: case EQ:
3577 case GE: case GT: case GEU: case GTU:
3578 case LE: case LT: case LEU: case LTU:
3579 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3580 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3581 loc = &XEXP (x, 0);
3582 goto restart;
3584 case MEM:
3585 /* Most cases of MEM that convert to valid addresses have already been
3586 handled by our scan of decls. The only special handling we
3587 need here is to make a copy of the rtx to ensure it isn't being
3588 shared if we have to change it to a pseudo.
3590 If the rtx is a simple reference to an address via a virtual register,
3591 it can potentially be shared. In such cases, first try to make it
3592 a valid address, which can also be shared. Otherwise, copy it and
3593 proceed normally.
3595 First check for common cases that need no processing. These are
3596 usually due to instantiation already being done on a previous instance
3597 of a shared rtx. */
3599 temp = XEXP (x, 0);
3600 if (CONSTANT_ADDRESS_P (temp)
3601 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3602 || temp == arg_pointer_rtx
3603 #endif
3604 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3605 || temp == hard_frame_pointer_rtx
3606 #endif
3607 || temp == frame_pointer_rtx)
3608 return 1;
3610 if (GET_CODE (temp) == PLUS
3611 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3612 && (XEXP (temp, 0) == frame_pointer_rtx
3613 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3614 || XEXP (temp, 0) == hard_frame_pointer_rtx
3615 #endif
3616 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3617 || XEXP (temp, 0) == arg_pointer_rtx
3618 #endif
3620 return 1;
3622 if (temp == virtual_stack_vars_rtx
3623 || temp == virtual_incoming_args_rtx
3624 || (GET_CODE (temp) == PLUS
3625 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3626 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3627 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3629 /* This MEM may be shared. If the substitution can be done without
3630 the need to generate new pseudos, we want to do it in place
3631 so all copies of the shared rtx benefit. The call below will
3632 only make substitutions if the resulting address is still
3633 valid.
3635 Note that we cannot pass X as the object in the recursive call
3636 since the insn being processed may not allow all valid
3637 addresses. However, if we were not passed on object, we can
3638 only modify X without copying it if X will have a valid
3639 address.
3641 ??? Also note that this can still lose if OBJECT is an insn that
3642 has less restrictions on an address that some other insn.
3643 In that case, we will modify the shared address. This case
3644 doesn't seem very likely, though. One case where this could
3645 happen is in the case of a USE or CLOBBER reference, but we
3646 take care of that below. */
3648 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3649 object ? object : x, 0))
3650 return 1;
3652 /* Otherwise make a copy and process that copy. We copy the entire
3653 RTL expression since it might be a PLUS which could also be
3654 shared. */
3655 *loc = x = copy_rtx (x);
3658 /* Fall through to generic unary operation case. */
3659 case SUBREG:
3660 case STRICT_LOW_PART:
3661 case NEG: case NOT:
3662 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3663 case SIGN_EXTEND: case ZERO_EXTEND:
3664 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3665 case FLOAT: case FIX:
3666 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3667 case ABS:
3668 case SQRT:
3669 case FFS:
3670 /* These case either have just one operand or we know that we need not
3671 check the rest of the operands. */
3672 loc = &XEXP (x, 0);
3673 goto restart;
3675 case USE:
3676 case CLOBBER:
3677 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3678 go ahead and make the invalid one, but do it to a copy. For a REG,
3679 just make the recursive call, since there's no chance of a problem. */
3681 if ((GET_CODE (XEXP (x, 0)) == MEM
3682 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3684 || (GET_CODE (XEXP (x, 0)) == REG
3685 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3686 return 1;
3688 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3689 loc = &XEXP (x, 0);
3690 goto restart;
3692 case REG:
3693 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3694 in front of this insn and substitute the temporary. */
3695 if (x == virtual_incoming_args_rtx)
3696 new = arg_pointer_rtx, offset = in_arg_offset;
3697 else if (x == virtual_stack_vars_rtx)
3698 new = frame_pointer_rtx, offset = var_offset;
3699 else if (x == virtual_stack_dynamic_rtx)
3700 new = stack_pointer_rtx, offset = dynamic_offset;
3701 else if (x == virtual_outgoing_args_rtx)
3702 new = stack_pointer_rtx, offset = out_arg_offset;
3703 else if (x == virtual_cfa_rtx)
3704 new = arg_pointer_rtx, offset = cfa_offset;
3706 if (new)
3708 temp = plus_constant (new, offset);
3709 if (!validate_change (object, loc, temp, 0))
3711 if (! extra_insns)
3712 return 0;
3714 start_sequence ();
3715 temp = force_operand (temp, NULL_RTX);
3716 seq = get_insns ();
3717 end_sequence ();
3719 emit_insns_before (seq, object);
3720 if (! validate_change (object, loc, temp, 0)
3721 && ! validate_replace_rtx (x, temp, object))
3722 abort ();
3726 return 1;
3728 case ADDRESSOF:
3729 if (GET_CODE (XEXP (x, 0)) == REG)
3730 return 1;
3732 else if (GET_CODE (XEXP (x, 0)) == MEM)
3734 /* If we have a (addressof (mem ..)), do any instantiation inside
3735 since we know we'll be making the inside valid when we finally
3736 remove the ADDRESSOF. */
3737 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3738 return 1;
3740 break;
3742 default:
3743 break;
3746 /* Scan all subexpressions. */
3747 fmt = GET_RTX_FORMAT (code);
3748 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3749 if (*fmt == 'e')
3751 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3752 return 0;
3754 else if (*fmt == 'E')
3755 for (j = 0; j < XVECLEN (x, i); j++)
3756 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3757 extra_insns))
3758 return 0;
3760 return 1;
3763 /* Optimization: assuming this function does not receive nonlocal gotos,
3764 delete the handlers for such, as well as the insns to establish
3765 and disestablish them. */
3767 static void
3768 delete_handlers ()
3770 rtx insn;
3771 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3773 /* Delete the handler by turning off the flag that would
3774 prevent jump_optimize from deleting it.
3775 Also permit deletion of the nonlocal labels themselves
3776 if nothing local refers to them. */
3777 if (GET_CODE (insn) == CODE_LABEL)
3779 tree t, last_t;
3781 LABEL_PRESERVE_P (insn) = 0;
3783 /* Remove it from the nonlocal_label list, to avoid confusing
3784 flow. */
3785 for (t = nonlocal_labels, last_t = 0; t;
3786 last_t = t, t = TREE_CHAIN (t))
3787 if (DECL_RTL (TREE_VALUE (t)) == insn)
3788 break;
3789 if (t)
3791 if (! last_t)
3792 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3793 else
3794 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3797 if (GET_CODE (insn) == INSN)
3799 int can_delete = 0;
3800 rtx t;
3801 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3802 if (reg_mentioned_p (t, PATTERN (insn)))
3804 can_delete = 1;
3805 break;
3807 if (can_delete
3808 || (nonlocal_goto_stack_level != 0
3809 && reg_mentioned_p (nonlocal_goto_stack_level,
3810 PATTERN (insn))))
3811 delete_insn (insn);
3816 /* Output a USE for any register use in RTL.
3817 This is used with -noreg to mark the extent of lifespan
3818 of any registers used in a user-visible variable's DECL_RTL. */
3820 void
3821 use_variable (rtl)
3822 rtx rtl;
3824 if (GET_CODE (rtl) == REG)
3825 /* This is a register variable. */
3826 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3827 else if (GET_CODE (rtl) == MEM
3828 && GET_CODE (XEXP (rtl, 0)) == REG
3829 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3830 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3831 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3832 /* This is a variable-sized structure. */
3833 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3836 /* Like use_variable except that it outputs the USEs after INSN
3837 instead of at the end of the insn-chain. */
3839 void
3840 use_variable_after (rtl, insn)
3841 rtx rtl, insn;
3843 if (GET_CODE (rtl) == REG)
3844 /* This is a register variable. */
3845 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3846 else if (GET_CODE (rtl) == MEM
3847 && GET_CODE (XEXP (rtl, 0)) == REG
3848 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3849 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3850 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3851 /* This is a variable-sized structure. */
3852 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3856 max_parm_reg_num ()
3858 return max_parm_reg;
3861 /* Return the first insn following those generated by `assign_parms'. */
3864 get_first_nonparm_insn ()
3866 if (last_parm_insn)
3867 return NEXT_INSN (last_parm_insn);
3868 return get_insns ();
3871 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3872 Crash if there is none. */
3875 get_first_block_beg ()
3877 register rtx searcher;
3878 register rtx insn = get_first_nonparm_insn ();
3880 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3881 if (GET_CODE (searcher) == NOTE
3882 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3883 return searcher;
3885 abort (); /* Invalid call to this function. (See comments above.) */
3886 return NULL_RTX;
3889 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3890 This means a type for which function calls must pass an address to the
3891 function or get an address back from the function.
3892 EXP may be a type node or an expression (whose type is tested). */
3895 aggregate_value_p (exp)
3896 tree exp;
3898 int i, regno, nregs;
3899 rtx reg;
3900 tree type;
3901 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3902 type = exp;
3903 else
3904 type = TREE_TYPE (exp);
3906 if (RETURN_IN_MEMORY (type))
3907 return 1;
3908 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3909 and thus can't be returned in registers. */
3910 if (TREE_ADDRESSABLE (type))
3911 return 1;
3912 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3913 return 1;
3914 /* Make sure we have suitable call-clobbered regs to return
3915 the value in; if not, we must return it in memory. */
3916 reg = hard_function_value (type, 0);
3918 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3919 it is OK. */
3920 if (GET_CODE (reg) != REG)
3921 return 0;
3923 regno = REGNO (reg);
3924 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3925 for (i = 0; i < nregs; i++)
3926 if (! call_used_regs[regno + i])
3927 return 1;
3928 return 0;
3931 /* Assign RTL expressions to the function's parameters.
3932 This may involve copying them into registers and using
3933 those registers as the RTL for them. */
3935 void
3936 assign_parms (fndecl)
3937 tree fndecl;
3939 register tree parm;
3940 register rtx entry_parm = 0;
3941 register rtx stack_parm = 0;
3942 CUMULATIVE_ARGS args_so_far;
3943 enum machine_mode promoted_mode, passed_mode;
3944 enum machine_mode nominal_mode, promoted_nominal_mode;
3945 int unsignedp;
3946 /* Total space needed so far for args on the stack,
3947 given as a constant and a tree-expression. */
3948 struct args_size stack_args_size;
3949 tree fntype = TREE_TYPE (fndecl);
3950 tree fnargs = DECL_ARGUMENTS (fndecl);
3951 /* This is used for the arg pointer when referring to stack args. */
3952 rtx internal_arg_pointer;
3953 /* This is a dummy PARM_DECL that we used for the function result if
3954 the function returns a structure. */
3955 tree function_result_decl = 0;
3956 #ifdef SETUP_INCOMING_VARARGS
3957 int varargs_setup = 0;
3958 #endif
3959 rtx conversion_insns = 0;
3960 struct args_size alignment_pad;
3962 /* Nonzero if the last arg is named `__builtin_va_alist',
3963 which is used on some machines for old-fashioned non-ANSI varargs.h;
3964 this should be stuck onto the stack as if it had arrived there. */
3965 int hide_last_arg
3966 = (current_function_varargs
3967 && fnargs
3968 && (parm = tree_last (fnargs)) != 0
3969 && DECL_NAME (parm)
3970 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3971 "__builtin_va_alist")));
3973 /* Nonzero if function takes extra anonymous args.
3974 This means the last named arg must be on the stack
3975 right before the anonymous ones. */
3976 int stdarg
3977 = (TYPE_ARG_TYPES (fntype) != 0
3978 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3979 != void_type_node));
3981 current_function_stdarg = stdarg;
3983 /* If the reg that the virtual arg pointer will be translated into is
3984 not a fixed reg or is the stack pointer, make a copy of the virtual
3985 arg pointer, and address parms via the copy. The frame pointer is
3986 considered fixed even though it is not marked as such.
3988 The second time through, simply use ap to avoid generating rtx. */
3990 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3991 || ! (fixed_regs[ARG_POINTER_REGNUM]
3992 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3993 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3994 else
3995 internal_arg_pointer = virtual_incoming_args_rtx;
3996 current_function_internal_arg_pointer = internal_arg_pointer;
3998 stack_args_size.constant = 0;
3999 stack_args_size.var = 0;
4001 /* If struct value address is treated as the first argument, make it so. */
4002 if (aggregate_value_p (DECL_RESULT (fndecl))
4003 && ! current_function_returns_pcc_struct
4004 && struct_value_incoming_rtx == 0)
4006 tree type = build_pointer_type (TREE_TYPE (fntype));
4008 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4010 DECL_ARG_TYPE (function_result_decl) = type;
4011 TREE_CHAIN (function_result_decl) = fnargs;
4012 fnargs = function_result_decl;
4015 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4016 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4018 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4019 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4020 #else
4021 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4022 #endif
4024 /* We haven't yet found an argument that we must push and pretend the
4025 caller did. */
4026 current_function_pretend_args_size = 0;
4028 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4030 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4031 struct args_size stack_offset;
4032 struct args_size arg_size;
4033 int passed_pointer = 0;
4034 int did_conversion = 0;
4035 tree passed_type = DECL_ARG_TYPE (parm);
4036 tree nominal_type = TREE_TYPE (parm);
4037 int pretend_named;
4039 /* Set LAST_NAMED if this is last named arg before some
4040 anonymous args. */
4041 int last_named = ((TREE_CHAIN (parm) == 0
4042 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4043 && (stdarg || current_function_varargs));
4044 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4045 most machines, if this is a varargs/stdarg function, then we treat
4046 the last named arg as if it were anonymous too. */
4047 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4049 if (TREE_TYPE (parm) == error_mark_node
4050 /* This can happen after weird syntax errors
4051 or if an enum type is defined among the parms. */
4052 || TREE_CODE (parm) != PARM_DECL
4053 || passed_type == NULL)
4055 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4056 = gen_rtx_MEM (BLKmode, const0_rtx);
4057 TREE_USED (parm) = 1;
4058 continue;
4061 /* For varargs.h function, save info about regs and stack space
4062 used by the individual args, not including the va_alist arg. */
4063 if (hide_last_arg && last_named)
4064 current_function_args_info = args_so_far;
4066 /* Find mode of arg as it is passed, and mode of arg
4067 as it should be during execution of this function. */
4068 passed_mode = TYPE_MODE (passed_type);
4069 nominal_mode = TYPE_MODE (nominal_type);
4071 /* If the parm's mode is VOID, its value doesn't matter,
4072 and avoid the usual things like emit_move_insn that could crash. */
4073 if (nominal_mode == VOIDmode)
4075 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4076 continue;
4079 /* If the parm is to be passed as a transparent union, use the
4080 type of the first field for the tests below. We have already
4081 verified that the modes are the same. */
4082 if (DECL_TRANSPARENT_UNION (parm)
4083 || TYPE_TRANSPARENT_UNION (passed_type))
4084 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4086 /* See if this arg was passed by invisible reference. It is if
4087 it is an object whose size depends on the contents of the
4088 object itself or if the machine requires these objects be passed
4089 that way. */
4091 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4092 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4093 || TREE_ADDRESSABLE (passed_type)
4094 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4095 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4096 passed_type, named_arg)
4097 #endif
4100 passed_type = nominal_type = build_pointer_type (passed_type);
4101 passed_pointer = 1;
4102 passed_mode = nominal_mode = Pmode;
4105 promoted_mode = passed_mode;
4107 #ifdef PROMOTE_FUNCTION_ARGS
4108 /* Compute the mode in which the arg is actually extended to. */
4109 unsignedp = TREE_UNSIGNED (passed_type);
4110 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4111 #endif
4113 /* Let machine desc say which reg (if any) the parm arrives in.
4114 0 means it arrives on the stack. */
4115 #ifdef FUNCTION_INCOMING_ARG
4116 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4117 passed_type, named_arg);
4118 #else
4119 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4120 passed_type, named_arg);
4121 #endif
4123 if (entry_parm == 0)
4124 promoted_mode = passed_mode;
4126 #ifdef SETUP_INCOMING_VARARGS
4127 /* If this is the last named parameter, do any required setup for
4128 varargs or stdargs. We need to know about the case of this being an
4129 addressable type, in which case we skip the registers it
4130 would have arrived in.
4132 For stdargs, LAST_NAMED will be set for two parameters, the one that
4133 is actually the last named, and the dummy parameter. We only
4134 want to do this action once.
4136 Also, indicate when RTL generation is to be suppressed. */
4137 if (last_named && !varargs_setup)
4139 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4140 current_function_pretend_args_size, 0);
4141 varargs_setup = 1;
4143 #endif
4145 /* Determine parm's home in the stack,
4146 in case it arrives in the stack or we should pretend it did.
4148 Compute the stack position and rtx where the argument arrives
4149 and its size.
4151 There is one complexity here: If this was a parameter that would
4152 have been passed in registers, but wasn't only because it is
4153 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4154 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4155 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4156 0 as it was the previous time. */
4158 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4159 locate_and_pad_parm (promoted_mode, passed_type,
4160 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4162 #else
4163 #ifdef FUNCTION_INCOMING_ARG
4164 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4165 passed_type,
4166 pretend_named) != 0,
4167 #else
4168 FUNCTION_ARG (args_so_far, promoted_mode,
4169 passed_type,
4170 pretend_named) != 0,
4171 #endif
4172 #endif
4173 fndecl, &stack_args_size, &stack_offset, &arg_size,
4174 &alignment_pad);
4177 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4179 if (offset_rtx == const0_rtx)
4180 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4181 else
4182 stack_parm = gen_rtx_MEM (promoted_mode,
4183 gen_rtx_PLUS (Pmode,
4184 internal_arg_pointer,
4185 offset_rtx));
4187 /* If this is a memory ref that contains aggregate components,
4188 mark it as such for cse and loop optimize. Likewise if it
4189 is readonly. */
4190 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4191 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4192 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4195 /* If this parameter was passed both in registers and in the stack,
4196 use the copy on the stack. */
4197 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4198 entry_parm = 0;
4200 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4201 /* If this parm was passed part in regs and part in memory,
4202 pretend it arrived entirely in memory
4203 by pushing the register-part onto the stack.
4205 In the special case of a DImode or DFmode that is split,
4206 we could put it together in a pseudoreg directly,
4207 but for now that's not worth bothering with. */
4209 if (entry_parm)
4211 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4212 passed_type, named_arg);
4214 if (nregs > 0)
4216 current_function_pretend_args_size
4217 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4218 / (PARM_BOUNDARY / BITS_PER_UNIT)
4219 * (PARM_BOUNDARY / BITS_PER_UNIT));
4221 /* Handle calls that pass values in multiple non-contiguous
4222 locations. The Irix 6 ABI has examples of this. */
4223 if (GET_CODE (entry_parm) == PARALLEL)
4224 emit_group_store (validize_mem (stack_parm), entry_parm,
4225 int_size_in_bytes (TREE_TYPE (parm)),
4226 (TYPE_ALIGN (TREE_TYPE (parm))
4227 / BITS_PER_UNIT));
4228 else
4229 move_block_from_reg (REGNO (entry_parm),
4230 validize_mem (stack_parm), nregs,
4231 int_size_in_bytes (TREE_TYPE (parm)));
4233 entry_parm = stack_parm;
4236 #endif
4238 /* If we didn't decide this parm came in a register,
4239 by default it came on the stack. */
4240 if (entry_parm == 0)
4241 entry_parm = stack_parm;
4243 /* Record permanently how this parm was passed. */
4244 DECL_INCOMING_RTL (parm) = entry_parm;
4246 /* If there is actually space on the stack for this parm,
4247 count it in stack_args_size; otherwise set stack_parm to 0
4248 to indicate there is no preallocated stack slot for the parm. */
4250 if (entry_parm == stack_parm
4251 || (GET_CODE (entry_parm) == PARALLEL
4252 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4253 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4254 /* On some machines, even if a parm value arrives in a register
4255 there is still an (uninitialized) stack slot allocated for it.
4257 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4258 whether this parameter already has a stack slot allocated,
4259 because an arg block exists only if current_function_args_size
4260 is larger than some threshold, and we haven't calculated that
4261 yet. So, for now, we just assume that stack slots never exist
4262 in this case. */
4263 || REG_PARM_STACK_SPACE (fndecl) > 0
4264 #endif
4267 stack_args_size.constant += arg_size.constant;
4268 if (arg_size.var)
4269 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4271 else
4272 /* No stack slot was pushed for this parm. */
4273 stack_parm = 0;
4275 /* Update info on where next arg arrives in registers. */
4277 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4278 passed_type, named_arg);
4280 /* If we can't trust the parm stack slot to be aligned enough
4281 for its ultimate type, don't use that slot after entry.
4282 We'll make another stack slot, if we need one. */
4284 int thisparm_boundary
4285 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4287 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4288 stack_parm = 0;
4291 /* If parm was passed in memory, and we need to convert it on entry,
4292 don't store it back in that same slot. */
4293 if (entry_parm != 0
4294 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4295 stack_parm = 0;
4297 #if 0
4298 /* Now adjust STACK_PARM to the mode and precise location
4299 where this parameter should live during execution,
4300 if we discover that it must live in the stack during execution.
4301 To make debuggers happier on big-endian machines, we store
4302 the value in the last bytes of the space available. */
4304 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4305 && stack_parm != 0)
4307 rtx offset_rtx;
4309 if (BYTES_BIG_ENDIAN
4310 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4311 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4312 - GET_MODE_SIZE (nominal_mode));
4314 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4315 if (offset_rtx == const0_rtx)
4316 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4317 else
4318 stack_parm = gen_rtx_MEM (nominal_mode,
4319 gen_rtx_PLUS (Pmode,
4320 internal_arg_pointer,
4321 offset_rtx));
4323 /* If this is a memory ref that contains aggregate components,
4324 mark it as such for cse and loop optimize. */
4325 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4327 #endif /* 0 */
4329 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4330 in the mode in which it arrives.
4331 STACK_PARM is an RTX for a stack slot where the parameter can live
4332 during the function (in case we want to put it there).
4333 STACK_PARM is 0 if no stack slot was pushed for it.
4335 Now output code if necessary to convert ENTRY_PARM to
4336 the type in which this function declares it,
4337 and store that result in an appropriate place,
4338 which may be a pseudo reg, may be STACK_PARM,
4339 or may be a local stack slot if STACK_PARM is 0.
4341 Set DECL_RTL to that place. */
4343 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4345 /* If a BLKmode arrives in registers, copy it to a stack slot.
4346 Handle calls that pass values in multiple non-contiguous
4347 locations. The Irix 6 ABI has examples of this. */
4348 if (GET_CODE (entry_parm) == REG
4349 || GET_CODE (entry_parm) == PARALLEL)
4351 int size_stored
4352 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4353 UNITS_PER_WORD);
4355 /* Note that we will be storing an integral number of words.
4356 So we have to be careful to ensure that we allocate an
4357 integral number of words. We do this below in the
4358 assign_stack_local if space was not allocated in the argument
4359 list. If it was, this will not work if PARM_BOUNDARY is not
4360 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4361 if it becomes a problem. */
4363 if (stack_parm == 0)
4365 stack_parm
4366 = assign_stack_local (GET_MODE (entry_parm),
4367 size_stored, 0);
4369 /* If this is a memory ref that contains aggregate
4370 components, mark it as such for cse and loop optimize. */
4371 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4374 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4375 abort ();
4377 if (TREE_READONLY (parm))
4378 RTX_UNCHANGING_P (stack_parm) = 1;
4380 /* Handle calls that pass values in multiple non-contiguous
4381 locations. The Irix 6 ABI has examples of this. */
4382 if (GET_CODE (entry_parm) == PARALLEL)
4383 emit_group_store (validize_mem (stack_parm), entry_parm,
4384 int_size_in_bytes (TREE_TYPE (parm)),
4385 (TYPE_ALIGN (TREE_TYPE (parm))
4386 / BITS_PER_UNIT));
4387 else
4388 move_block_from_reg (REGNO (entry_parm),
4389 validize_mem (stack_parm),
4390 size_stored / UNITS_PER_WORD,
4391 int_size_in_bytes (TREE_TYPE (parm)));
4393 DECL_RTL (parm) = stack_parm;
4395 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4396 && ! DECL_INLINE (fndecl))
4397 /* layout_decl may set this. */
4398 || TREE_ADDRESSABLE (parm)
4399 || TREE_SIDE_EFFECTS (parm)
4400 /* If -ffloat-store specified, don't put explicit
4401 float variables into registers. */
4402 || (flag_float_store
4403 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4404 /* Always assign pseudo to structure return or item passed
4405 by invisible reference. */
4406 || passed_pointer || parm == function_result_decl)
4408 /* Store the parm in a pseudoregister during the function, but we
4409 may need to do it in a wider mode. */
4411 register rtx parmreg;
4412 int regno, regnoi = 0, regnor = 0;
4414 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4416 promoted_nominal_mode
4417 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4419 parmreg = gen_reg_rtx (promoted_nominal_mode);
4420 mark_user_reg (parmreg);
4422 /* If this was an item that we received a pointer to, set DECL_RTL
4423 appropriately. */
4424 if (passed_pointer)
4426 DECL_RTL (parm)
4427 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4428 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4430 else
4431 DECL_RTL (parm) = parmreg;
4433 /* Copy the value into the register. */
4434 if (nominal_mode != passed_mode
4435 || promoted_nominal_mode != promoted_mode)
4437 int save_tree_used;
4438 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4439 mode, by the caller. We now have to convert it to
4440 NOMINAL_MODE, if different. However, PARMREG may be in
4441 a different mode than NOMINAL_MODE if it is being stored
4442 promoted.
4444 If ENTRY_PARM is a hard register, it might be in a register
4445 not valid for operating in its mode (e.g., an odd-numbered
4446 register for a DFmode). In that case, moves are the only
4447 thing valid, so we can't do a convert from there. This
4448 occurs when the calling sequence allow such misaligned
4449 usages.
4451 In addition, the conversion may involve a call, which could
4452 clobber parameters which haven't been copied to pseudo
4453 registers yet. Therefore, we must first copy the parm to
4454 a pseudo reg here, and save the conversion until after all
4455 parameters have been moved. */
4457 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4459 emit_move_insn (tempreg, validize_mem (entry_parm));
4461 push_to_sequence (conversion_insns);
4462 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4464 /* TREE_USED gets set erroneously during expand_assignment. */
4465 save_tree_used = TREE_USED (parm);
4466 expand_assignment (parm,
4467 make_tree (nominal_type, tempreg), 0, 0);
4468 TREE_USED (parm) = save_tree_used;
4469 conversion_insns = get_insns ();
4470 did_conversion = 1;
4471 end_sequence ();
4473 else
4474 emit_move_insn (parmreg, validize_mem (entry_parm));
4476 /* If we were passed a pointer but the actual value
4477 can safely live in a register, put it in one. */
4478 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4479 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4480 && ! DECL_INLINE (fndecl))
4481 /* layout_decl may set this. */
4482 || TREE_ADDRESSABLE (parm)
4483 || TREE_SIDE_EFFECTS (parm)
4484 /* If -ffloat-store specified, don't put explicit
4485 float variables into registers. */
4486 || (flag_float_store
4487 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4489 /* We can't use nominal_mode, because it will have been set to
4490 Pmode above. We must use the actual mode of the parm. */
4491 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4492 mark_user_reg (parmreg);
4493 emit_move_insn (parmreg, DECL_RTL (parm));
4494 DECL_RTL (parm) = parmreg;
4495 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4496 now the parm. */
4497 stack_parm = 0;
4499 #ifdef FUNCTION_ARG_CALLEE_COPIES
4500 /* If we are passed an arg by reference and it is our responsibility
4501 to make a copy, do it now.
4502 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4503 original argument, so we must recreate them in the call to
4504 FUNCTION_ARG_CALLEE_COPIES. */
4505 /* ??? Later add code to handle the case that if the argument isn't
4506 modified, don't do the copy. */
4508 else if (passed_pointer
4509 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4510 TYPE_MODE (DECL_ARG_TYPE (parm)),
4511 DECL_ARG_TYPE (parm),
4512 named_arg)
4513 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4515 rtx copy;
4516 tree type = DECL_ARG_TYPE (parm);
4518 /* This sequence may involve a library call perhaps clobbering
4519 registers that haven't been copied to pseudos yet. */
4521 push_to_sequence (conversion_insns);
4523 if (TYPE_SIZE (type) == 0
4524 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4525 /* This is a variable sized object. */
4526 copy = gen_rtx_MEM (BLKmode,
4527 allocate_dynamic_stack_space
4528 (expr_size (parm), NULL_RTX,
4529 TYPE_ALIGN (type)));
4530 else
4531 copy = assign_stack_temp (TYPE_MODE (type),
4532 int_size_in_bytes (type), 1);
4533 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4534 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4536 store_expr (parm, copy, 0);
4537 emit_move_insn (parmreg, XEXP (copy, 0));
4538 if (current_function_check_memory_usage)
4539 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4540 XEXP (copy, 0), Pmode,
4541 GEN_INT (int_size_in_bytes (type)),
4542 TYPE_MODE (sizetype),
4543 GEN_INT (MEMORY_USE_RW),
4544 TYPE_MODE (integer_type_node));
4545 conversion_insns = get_insns ();
4546 did_conversion = 1;
4547 end_sequence ();
4549 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4551 /* In any case, record the parm's desired stack location
4552 in case we later discover it must live in the stack.
4554 If it is a COMPLEX value, store the stack location for both
4555 halves. */
4557 if (GET_CODE (parmreg) == CONCAT)
4558 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4559 else
4560 regno = REGNO (parmreg);
4562 if (regno >= max_parm_reg)
4564 rtx *new;
4565 int old_max_parm_reg = max_parm_reg;
4567 /* It's slow to expand this one register at a time,
4568 but it's also rare and we need max_parm_reg to be
4569 precisely correct. */
4570 max_parm_reg = regno + 1;
4571 new = (rtx *) xrealloc (parm_reg_stack_loc,
4572 max_parm_reg * sizeof (rtx));
4573 bzero ((char *) (new + old_max_parm_reg),
4574 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4575 parm_reg_stack_loc = new;
4578 if (GET_CODE (parmreg) == CONCAT)
4580 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4582 regnor = REGNO (gen_realpart (submode, parmreg));
4583 regnoi = REGNO (gen_imagpart (submode, parmreg));
4585 if (stack_parm != 0)
4587 parm_reg_stack_loc[regnor]
4588 = gen_realpart (submode, stack_parm);
4589 parm_reg_stack_loc[regnoi]
4590 = gen_imagpart (submode, stack_parm);
4592 else
4594 parm_reg_stack_loc[regnor] = 0;
4595 parm_reg_stack_loc[regnoi] = 0;
4598 else
4599 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4601 /* Mark the register as eliminable if we did no conversion
4602 and it was copied from memory at a fixed offset,
4603 and the arg pointer was not copied to a pseudo-reg.
4604 If the arg pointer is a pseudo reg or the offset formed
4605 an invalid address, such memory-equivalences
4606 as we make here would screw up life analysis for it. */
4607 if (nominal_mode == passed_mode
4608 && ! did_conversion
4609 && stack_parm != 0
4610 && GET_CODE (stack_parm) == MEM
4611 && stack_offset.var == 0
4612 && reg_mentioned_p (virtual_incoming_args_rtx,
4613 XEXP (stack_parm, 0)))
4615 rtx linsn = get_last_insn ();
4616 rtx sinsn, set;
4618 /* Mark complex types separately. */
4619 if (GET_CODE (parmreg) == CONCAT)
4620 /* Scan backwards for the set of the real and
4621 imaginary parts. */
4622 for (sinsn = linsn; sinsn != 0;
4623 sinsn = prev_nonnote_insn (sinsn))
4625 set = single_set (sinsn);
4626 if (set != 0
4627 && SET_DEST (set) == regno_reg_rtx [regnoi])
4628 REG_NOTES (sinsn)
4629 = gen_rtx_EXPR_LIST (REG_EQUIV,
4630 parm_reg_stack_loc[regnoi],
4631 REG_NOTES (sinsn));
4632 else if (set != 0
4633 && SET_DEST (set) == regno_reg_rtx [regnor])
4634 REG_NOTES (sinsn)
4635 = gen_rtx_EXPR_LIST (REG_EQUIV,
4636 parm_reg_stack_loc[regnor],
4637 REG_NOTES (sinsn));
4639 else if ((set = single_set (linsn)) != 0
4640 && SET_DEST (set) == parmreg)
4641 REG_NOTES (linsn)
4642 = gen_rtx_EXPR_LIST (REG_EQUIV,
4643 stack_parm, REG_NOTES (linsn));
4646 /* For pointer data type, suggest pointer register. */
4647 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4648 mark_reg_pointer (parmreg,
4649 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4650 / BITS_PER_UNIT));
4652 else
4654 /* Value must be stored in the stack slot STACK_PARM
4655 during function execution. */
4657 if (promoted_mode != nominal_mode)
4659 /* Conversion is required. */
4660 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4662 emit_move_insn (tempreg, validize_mem (entry_parm));
4664 push_to_sequence (conversion_insns);
4665 entry_parm = convert_to_mode (nominal_mode, tempreg,
4666 TREE_UNSIGNED (TREE_TYPE (parm)));
4667 if (stack_parm)
4669 /* ??? This may need a big-endian conversion on sparc64. */
4670 stack_parm = change_address (stack_parm, nominal_mode,
4671 NULL_RTX);
4673 conversion_insns = get_insns ();
4674 did_conversion = 1;
4675 end_sequence ();
4678 if (entry_parm != stack_parm)
4680 if (stack_parm == 0)
4682 stack_parm
4683 = assign_stack_local (GET_MODE (entry_parm),
4684 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4685 /* If this is a memory ref that contains aggregate components,
4686 mark it as such for cse and loop optimize. */
4687 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4690 if (promoted_mode != nominal_mode)
4692 push_to_sequence (conversion_insns);
4693 emit_move_insn (validize_mem (stack_parm),
4694 validize_mem (entry_parm));
4695 conversion_insns = get_insns ();
4696 end_sequence ();
4698 else
4699 emit_move_insn (validize_mem (stack_parm),
4700 validize_mem (entry_parm));
4702 if (current_function_check_memory_usage)
4704 push_to_sequence (conversion_insns);
4705 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4706 XEXP (stack_parm, 0), Pmode,
4707 GEN_INT (GET_MODE_SIZE (GET_MODE
4708 (entry_parm))),
4709 TYPE_MODE (sizetype),
4710 GEN_INT (MEMORY_USE_RW),
4711 TYPE_MODE (integer_type_node));
4713 conversion_insns = get_insns ();
4714 end_sequence ();
4716 DECL_RTL (parm) = stack_parm;
4719 /* If this "parameter" was the place where we are receiving the
4720 function's incoming structure pointer, set up the result. */
4721 if (parm == function_result_decl)
4723 tree result = DECL_RESULT (fndecl);
4724 tree restype = TREE_TYPE (result);
4726 DECL_RTL (result)
4727 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4729 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4730 AGGREGATE_TYPE_P (restype));
4733 if (TREE_THIS_VOLATILE (parm))
4734 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4735 if (TREE_READONLY (parm))
4736 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4739 /* Output all parameter conversion instructions (possibly including calls)
4740 now that all parameters have been copied out of hard registers. */
4741 emit_insns (conversion_insns);
4743 last_parm_insn = get_last_insn ();
4745 current_function_args_size = stack_args_size.constant;
4747 /* Adjust function incoming argument size for alignment and
4748 minimum length. */
4750 #ifdef REG_PARM_STACK_SPACE
4751 #ifndef MAYBE_REG_PARM_STACK_SPACE
4752 current_function_args_size = MAX (current_function_args_size,
4753 REG_PARM_STACK_SPACE (fndecl));
4754 #endif
4755 #endif
4757 #ifdef STACK_BOUNDARY
4758 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4760 current_function_args_size
4761 = ((current_function_args_size + STACK_BYTES - 1)
4762 / STACK_BYTES) * STACK_BYTES;
4763 #endif
4765 #ifdef ARGS_GROW_DOWNWARD
4766 current_function_arg_offset_rtx
4767 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4768 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4769 size_int (-stack_args_size.constant)),
4770 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4771 #else
4772 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4773 #endif
4775 /* See how many bytes, if any, of its args a function should try to pop
4776 on return. */
4778 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4779 current_function_args_size);
4781 /* For stdarg.h function, save info about
4782 regs and stack space used by the named args. */
4784 if (!hide_last_arg)
4785 current_function_args_info = args_so_far;
4787 /* Set the rtx used for the function return value. Put this in its
4788 own variable so any optimizers that need this information don't have
4789 to include tree.h. Do this here so it gets done when an inlined
4790 function gets output. */
4792 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4795 /* Indicate whether REGNO is an incoming argument to the current function
4796 that was promoted to a wider mode. If so, return the RTX for the
4797 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4798 that REGNO is promoted from and whether the promotion was signed or
4799 unsigned. */
4801 #ifdef PROMOTE_FUNCTION_ARGS
4804 promoted_input_arg (regno, pmode, punsignedp)
4805 int regno;
4806 enum machine_mode *pmode;
4807 int *punsignedp;
4809 tree arg;
4811 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4812 arg = TREE_CHAIN (arg))
4813 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4814 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4815 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4817 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4818 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4820 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4821 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4822 && mode != DECL_MODE (arg))
4824 *pmode = DECL_MODE (arg);
4825 *punsignedp = unsignedp;
4826 return DECL_INCOMING_RTL (arg);
4830 return 0;
4833 #endif
4835 /* Compute the size and offset from the start of the stacked arguments for a
4836 parm passed in mode PASSED_MODE and with type TYPE.
4838 INITIAL_OFFSET_PTR points to the current offset into the stacked
4839 arguments.
4841 The starting offset and size for this parm are returned in *OFFSET_PTR
4842 and *ARG_SIZE_PTR, respectively.
4844 IN_REGS is non-zero if the argument will be passed in registers. It will
4845 never be set if REG_PARM_STACK_SPACE is not defined.
4847 FNDECL is the function in which the argument was defined.
4849 There are two types of rounding that are done. The first, controlled by
4850 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4851 list to be aligned to the specific boundary (in bits). This rounding
4852 affects the initial and starting offsets, but not the argument size.
4854 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4855 optionally rounds the size of the parm to PARM_BOUNDARY. The
4856 initial offset is not affected by this rounding, while the size always
4857 is and the starting offset may be. */
4859 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4860 initial_offset_ptr is positive because locate_and_pad_parm's
4861 callers pass in the total size of args so far as
4862 initial_offset_ptr. arg_size_ptr is always positive.*/
4864 void
4865 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4866 initial_offset_ptr, offset_ptr, arg_size_ptr,
4867 alignment_pad)
4868 enum machine_mode passed_mode;
4869 tree type;
4870 int in_regs;
4871 tree fndecl ATTRIBUTE_UNUSED;
4872 struct args_size *initial_offset_ptr;
4873 struct args_size *offset_ptr;
4874 struct args_size *arg_size_ptr;
4875 struct args_size *alignment_pad;
4878 tree sizetree
4879 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4880 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4881 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4883 #ifdef REG_PARM_STACK_SPACE
4884 /* If we have found a stack parm before we reach the end of the
4885 area reserved for registers, skip that area. */
4886 if (! in_regs)
4888 int reg_parm_stack_space = 0;
4890 #ifdef MAYBE_REG_PARM_STACK_SPACE
4891 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4892 #else
4893 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4894 #endif
4895 if (reg_parm_stack_space > 0)
4897 if (initial_offset_ptr->var)
4899 initial_offset_ptr->var
4900 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4901 size_int (reg_parm_stack_space));
4902 initial_offset_ptr->constant = 0;
4904 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4905 initial_offset_ptr->constant = reg_parm_stack_space;
4908 #endif /* REG_PARM_STACK_SPACE */
4910 arg_size_ptr->var = 0;
4911 arg_size_ptr->constant = 0;
4913 #ifdef ARGS_GROW_DOWNWARD
4914 if (initial_offset_ptr->var)
4916 offset_ptr->constant = 0;
4917 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4918 initial_offset_ptr->var);
4920 else
4922 offset_ptr->constant = - initial_offset_ptr->constant;
4923 offset_ptr->var = 0;
4925 if (where_pad != none
4926 && (TREE_CODE (sizetree) != INTEGER_CST
4927 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4928 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4929 SUB_PARM_SIZE (*offset_ptr, sizetree);
4930 if (where_pad != downward)
4931 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
4932 if (initial_offset_ptr->var)
4934 arg_size_ptr->var = size_binop (MINUS_EXPR,
4935 size_binop (MINUS_EXPR,
4936 integer_zero_node,
4937 initial_offset_ptr->var),
4938 offset_ptr->var);
4940 else
4942 arg_size_ptr->constant = (- initial_offset_ptr->constant
4943 - offset_ptr->constant);
4945 #else /* !ARGS_GROW_DOWNWARD */
4946 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
4947 *offset_ptr = *initial_offset_ptr;
4949 #ifdef PUSH_ROUNDING
4950 if (passed_mode != BLKmode)
4951 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4952 #endif
4954 /* Pad_below needs the pre-rounded size to know how much to pad below
4955 so this must be done before rounding up. */
4956 if (where_pad == downward
4957 /* However, BLKmode args passed in regs have their padding done elsewhere.
4958 The stack slot must be able to hold the entire register. */
4959 && !(in_regs && passed_mode == BLKmode))
4960 pad_below (offset_ptr, passed_mode, sizetree);
4962 if (where_pad != none
4963 && (TREE_CODE (sizetree) != INTEGER_CST
4964 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4965 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4967 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4968 #endif /* ARGS_GROW_DOWNWARD */
4971 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4972 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4974 static void
4975 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
4976 struct args_size *offset_ptr;
4977 int boundary;
4978 struct args_size *alignment_pad;
4980 tree save_var;
4981 HOST_WIDE_INT save_constant;
4983 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4985 if (boundary > PARM_BOUNDARY)
4987 save_var = offset_ptr->var;
4988 save_constant = offset_ptr->constant;
4991 alignment_pad->var = NULL_TREE;
4992 alignment_pad->constant = 0;
4994 if (boundary > BITS_PER_UNIT)
4996 if (offset_ptr->var)
4998 offset_ptr->var =
4999 #ifdef ARGS_GROW_DOWNWARD
5000 round_down
5001 #else
5002 round_up
5003 #endif
5004 (ARGS_SIZE_TREE (*offset_ptr),
5005 boundary / BITS_PER_UNIT);
5006 offset_ptr->constant = 0; /*?*/
5007 if (boundary > PARM_BOUNDARY)
5008 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
5010 else
5012 offset_ptr->constant =
5013 #ifdef ARGS_GROW_DOWNWARD
5014 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5015 #else
5016 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5017 #endif
5018 if (boundary > PARM_BOUNDARY)
5019 alignment_pad->constant = offset_ptr->constant - save_constant;
5024 #ifndef ARGS_GROW_DOWNWARD
5025 static void
5026 pad_below (offset_ptr, passed_mode, sizetree)
5027 struct args_size *offset_ptr;
5028 enum machine_mode passed_mode;
5029 tree sizetree;
5031 if (passed_mode != BLKmode)
5033 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5034 offset_ptr->constant
5035 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5036 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5037 - GET_MODE_SIZE (passed_mode));
5039 else
5041 if (TREE_CODE (sizetree) != INTEGER_CST
5042 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5044 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5045 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5046 /* Add it in. */
5047 ADD_PARM_SIZE (*offset_ptr, s2);
5048 SUB_PARM_SIZE (*offset_ptr, sizetree);
5052 #endif
5054 #ifdef ARGS_GROW_DOWNWARD
5055 static tree
5056 round_down (value, divisor)
5057 tree value;
5058 int divisor;
5060 return size_binop (MULT_EXPR,
5061 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5062 size_int (divisor));
5064 #endif
5066 /* Walk the tree of blocks describing the binding levels within a function
5067 and warn about uninitialized variables.
5068 This is done after calling flow_analysis and before global_alloc
5069 clobbers the pseudo-regs to hard regs. */
5071 void
5072 uninitialized_vars_warning (block)
5073 tree block;
5075 register tree decl, sub;
5076 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5078 if (TREE_CODE (decl) == VAR_DECL
5079 /* These warnings are unreliable for and aggregates
5080 because assigning the fields one by one can fail to convince
5081 flow.c that the entire aggregate was initialized.
5082 Unions are troublesome because members may be shorter. */
5083 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5084 && DECL_RTL (decl) != 0
5085 && GET_CODE (DECL_RTL (decl)) == REG
5086 /* Global optimizations can make it difficult to determine if a
5087 particular variable has been initialized. However, a VAR_DECL
5088 with a nonzero DECL_INITIAL had an initializer, so do not
5089 claim it is potentially uninitialized.
5091 We do not care about the actual value in DECL_INITIAL, so we do
5092 not worry that it may be a dangling pointer. */
5093 && DECL_INITIAL (decl) == NULL_TREE
5094 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5095 warning_with_decl (decl,
5096 "`%s' might be used uninitialized in this function");
5097 if (TREE_CODE (decl) == VAR_DECL
5098 && DECL_RTL (decl) != 0
5099 && GET_CODE (DECL_RTL (decl)) == REG
5100 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5101 warning_with_decl (decl,
5102 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5104 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5105 uninitialized_vars_warning (sub);
5108 /* Do the appropriate part of uninitialized_vars_warning
5109 but for arguments instead of local variables. */
5111 void
5112 setjmp_args_warning ()
5114 register tree decl;
5115 for (decl = DECL_ARGUMENTS (current_function_decl);
5116 decl; decl = TREE_CHAIN (decl))
5117 if (DECL_RTL (decl) != 0
5118 && GET_CODE (DECL_RTL (decl)) == REG
5119 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5120 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5123 /* If this function call setjmp, put all vars into the stack
5124 unless they were declared `register'. */
5126 void
5127 setjmp_protect (block)
5128 tree block;
5130 register tree decl, sub;
5131 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5132 if ((TREE_CODE (decl) == VAR_DECL
5133 || TREE_CODE (decl) == PARM_DECL)
5134 && DECL_RTL (decl) != 0
5135 && (GET_CODE (DECL_RTL (decl)) == REG
5136 || (GET_CODE (DECL_RTL (decl)) == MEM
5137 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5138 /* If this variable came from an inline function, it must be
5139 that its life doesn't overlap the setjmp. If there was a
5140 setjmp in the function, it would already be in memory. We
5141 must exclude such variable because their DECL_RTL might be
5142 set to strange things such as virtual_stack_vars_rtx. */
5143 && ! DECL_FROM_INLINE (decl)
5144 && (
5145 #ifdef NON_SAVING_SETJMP
5146 /* If longjmp doesn't restore the registers,
5147 don't put anything in them. */
5148 NON_SAVING_SETJMP
5150 #endif
5151 ! DECL_REGISTER (decl)))
5152 put_var_into_stack (decl);
5153 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5154 setjmp_protect (sub);
5157 /* Like the previous function, but for args instead of local variables. */
5159 void
5160 setjmp_protect_args ()
5162 register tree decl;
5163 for (decl = DECL_ARGUMENTS (current_function_decl);
5164 decl; decl = TREE_CHAIN (decl))
5165 if ((TREE_CODE (decl) == VAR_DECL
5166 || TREE_CODE (decl) == PARM_DECL)
5167 && DECL_RTL (decl) != 0
5168 && (GET_CODE (DECL_RTL (decl)) == REG
5169 || (GET_CODE (DECL_RTL (decl)) == MEM
5170 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5171 && (
5172 /* If longjmp doesn't restore the registers,
5173 don't put anything in them. */
5174 #ifdef NON_SAVING_SETJMP
5175 NON_SAVING_SETJMP
5177 #endif
5178 ! DECL_REGISTER (decl)))
5179 put_var_into_stack (decl);
5182 /* Return the context-pointer register corresponding to DECL,
5183 or 0 if it does not need one. */
5186 lookup_static_chain (decl)
5187 tree decl;
5189 tree context = decl_function_context (decl);
5190 tree link;
5192 if (context == 0
5193 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5194 return 0;
5196 /* We treat inline_function_decl as an alias for the current function
5197 because that is the inline function whose vars, types, etc.
5198 are being merged into the current function.
5199 See expand_inline_function. */
5200 if (context == current_function_decl || context == inline_function_decl)
5201 return virtual_stack_vars_rtx;
5203 for (link = context_display; link; link = TREE_CHAIN (link))
5204 if (TREE_PURPOSE (link) == context)
5205 return RTL_EXPR_RTL (TREE_VALUE (link));
5207 abort ();
5210 /* Convert a stack slot address ADDR for variable VAR
5211 (from a containing function)
5212 into an address valid in this function (using a static chain). */
5215 fix_lexical_addr (addr, var)
5216 rtx addr;
5217 tree var;
5219 rtx basereg;
5220 HOST_WIDE_INT displacement;
5221 tree context = decl_function_context (var);
5222 struct function *fp;
5223 rtx base = 0;
5225 /* If this is the present function, we need not do anything. */
5226 if (context == current_function_decl || context == inline_function_decl)
5227 return addr;
5229 for (fp = outer_function_chain; fp; fp = fp->next)
5230 if (fp->decl == context)
5231 break;
5233 if (fp == 0)
5234 abort ();
5236 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5237 addr = XEXP (XEXP (addr, 0), 0);
5239 /* Decode given address as base reg plus displacement. */
5240 if (GET_CODE (addr) == REG)
5241 basereg = addr, displacement = 0;
5242 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5243 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5244 else
5245 abort ();
5247 /* We accept vars reached via the containing function's
5248 incoming arg pointer and via its stack variables pointer. */
5249 if (basereg == fp->internal_arg_pointer)
5251 /* If reached via arg pointer, get the arg pointer value
5252 out of that function's stack frame.
5254 There are two cases: If a separate ap is needed, allocate a
5255 slot in the outer function for it and dereference it that way.
5256 This is correct even if the real ap is actually a pseudo.
5257 Otherwise, just adjust the offset from the frame pointer to
5258 compensate. */
5260 #ifdef NEED_SEPARATE_AP
5261 rtx addr;
5263 if (fp->x_arg_pointer_save_area == 0)
5264 fp->x_arg_pointer_save_area
5265 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5267 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5268 addr = memory_address (Pmode, addr);
5270 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5271 #else
5272 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5273 base = lookup_static_chain (var);
5274 #endif
5277 else if (basereg == virtual_stack_vars_rtx)
5279 /* This is the same code as lookup_static_chain, duplicated here to
5280 avoid an extra call to decl_function_context. */
5281 tree link;
5283 for (link = context_display; link; link = TREE_CHAIN (link))
5284 if (TREE_PURPOSE (link) == context)
5286 base = RTL_EXPR_RTL (TREE_VALUE (link));
5287 break;
5291 if (base == 0)
5292 abort ();
5294 /* Use same offset, relative to appropriate static chain or argument
5295 pointer. */
5296 return plus_constant (base, displacement);
5299 /* Return the address of the trampoline for entering nested fn FUNCTION.
5300 If necessary, allocate a trampoline (in the stack frame)
5301 and emit rtl to initialize its contents (at entry to this function). */
5304 trampoline_address (function)
5305 tree function;
5307 tree link;
5308 tree rtlexp;
5309 rtx tramp;
5310 struct function *fp;
5311 tree fn_context;
5313 /* Find an existing trampoline and return it. */
5314 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5315 if (TREE_PURPOSE (link) == function)
5316 return
5317 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5319 for (fp = outer_function_chain; fp; fp = fp->next)
5320 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5321 if (TREE_PURPOSE (link) == function)
5323 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5324 function);
5325 return round_trampoline_addr (tramp);
5328 /* None exists; we must make one. */
5330 /* Find the `struct function' for the function containing FUNCTION. */
5331 fp = 0;
5332 fn_context = decl_function_context (function);
5333 if (fn_context != current_function_decl
5334 && fn_context != inline_function_decl)
5335 for (fp = outer_function_chain; fp; fp = fp->next)
5336 if (fp->decl == fn_context)
5337 break;
5339 /* Allocate run-time space for this trampoline
5340 (usually in the defining function's stack frame). */
5341 #ifdef ALLOCATE_TRAMPOLINE
5342 tramp = ALLOCATE_TRAMPOLINE (fp);
5343 #else
5344 /* If rounding needed, allocate extra space
5345 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5346 #ifdef TRAMPOLINE_ALIGNMENT
5347 #define TRAMPOLINE_REAL_SIZE \
5348 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5349 #else
5350 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5351 #endif
5352 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5353 fp ? fp : current_function);
5354 #endif
5356 /* Record the trampoline for reuse and note it for later initialization
5357 by expand_function_end. */
5358 if (fp != 0)
5360 push_obstacks (fp->function_maybepermanent_obstack,
5361 fp->function_maybepermanent_obstack);
5362 rtlexp = make_node (RTL_EXPR);
5363 RTL_EXPR_RTL (rtlexp) = tramp;
5364 fp->x_trampoline_list = tree_cons (function, rtlexp,
5365 fp->x_trampoline_list);
5366 pop_obstacks ();
5368 else
5370 /* Make the RTL_EXPR node temporary, not momentary, so that the
5371 trampoline_list doesn't become garbage. */
5372 int momentary = suspend_momentary ();
5373 rtlexp = make_node (RTL_EXPR);
5374 resume_momentary (momentary);
5376 RTL_EXPR_RTL (rtlexp) = tramp;
5377 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5380 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5381 return round_trampoline_addr (tramp);
5384 /* Given a trampoline address,
5385 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5387 static rtx
5388 round_trampoline_addr (tramp)
5389 rtx tramp;
5391 #ifdef TRAMPOLINE_ALIGNMENT
5392 /* Round address up to desired boundary. */
5393 rtx temp = gen_reg_rtx (Pmode);
5394 temp = expand_binop (Pmode, add_optab, tramp,
5395 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5396 temp, 0, OPTAB_LIB_WIDEN);
5397 tramp = expand_binop (Pmode, and_optab, temp,
5398 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5399 temp, 0, OPTAB_LIB_WIDEN);
5400 #endif
5401 return tramp;
5404 /* Insert the BLOCK in the block-tree before LAST_INSN. */
5406 void
5407 retrofit_block (block, last_insn)
5408 tree block;
5409 rtx last_insn;
5411 rtx insn;
5413 /* Now insert the new BLOCK at the right place in the block trees
5414 for the function which called the inline function. We just look
5415 backwards for a NOTE_INSN_BLOCK_{BEG,END}. If we find the
5416 beginning of a block, then this new block becomes the first
5417 subblock of that block. If we find the end of a block, then this
5418 new block follows that block in the list of blocks. */
5419 for (insn = last_insn; insn; insn = PREV_INSN (insn))
5420 if (GET_CODE (insn) == NOTE
5421 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
5422 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
5423 break;
5424 if (!insn || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5426 tree superblock;
5428 if (insn)
5429 superblock = NOTE_BLOCK (insn);
5430 else
5431 superblock = DECL_INITIAL (current_function_decl);
5433 BLOCK_SUPERCONTEXT (block) = superblock;
5434 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (superblock);
5435 BLOCK_SUBBLOCKS (superblock) = block;
5437 else
5439 tree prevblock = NOTE_BLOCK (insn);
5441 BLOCK_SUPERCONTEXT (block) = BLOCK_SUPERCONTEXT (prevblock);
5442 BLOCK_CHAIN (block) = BLOCK_CHAIN (prevblock);
5443 BLOCK_CHAIN (prevblock) = block;
5447 /* The functions identify_blocks and reorder_blocks provide a way to
5448 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5449 duplicate portions of the RTL code. Call identify_blocks before
5450 changing the RTL, and call reorder_blocks after. */
5452 /* Put all this function's BLOCK nodes including those that are chained
5453 onto the first block into a vector, and return it.
5454 Also store in each NOTE for the beginning or end of a block
5455 the index of that block in the vector.
5456 The arguments are BLOCK, the chain of top-level blocks of the function,
5457 and INSNS, the insn chain of the function. */
5459 void
5460 identify_blocks (block, insns)
5461 tree block;
5462 rtx insns;
5464 int n_blocks;
5465 tree *block_vector;
5466 tree *block_stack;
5467 int depth = 0;
5468 int current_block_number = 1;
5469 rtx insn;
5471 if (block == 0)
5472 return;
5474 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5475 depth-first order. */
5476 n_blocks = all_blocks (block, 0);
5477 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5478 all_blocks (block, block_vector);
5480 block_stack = (tree *) alloca (n_blocks * sizeof (tree));
5482 for (insn = insns; insn; insn = NEXT_INSN (insn))
5483 if (GET_CODE (insn) == NOTE)
5485 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5487 tree b;
5489 /* If there are more block notes than BLOCKs, something
5490 is badly wrong. */
5491 if (current_block_number == n_blocks)
5492 abort ();
5494 b = block_vector[current_block_number++];
5495 NOTE_BLOCK (insn) = b;
5496 block_stack[depth++] = b;
5498 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5500 if (depth == 0)
5501 /* There are more NOTE_INSN_BLOCK_ENDs that
5502 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5503 abort ();
5505 NOTE_BLOCK (insn) = block_stack[--depth];
5509 /* In whole-function mode, we might not have seen the whole function
5510 yet, so we might not use up all the blocks. */
5511 if (n_blocks != current_block_number
5512 && !current_function->x_whole_function_mode_p)
5513 abort ();
5515 free (block_vector);
5518 /* Given a revised instruction chain, rebuild the tree structure of
5519 BLOCK nodes to correspond to the new order of RTL. The new block
5520 tree is inserted below TOP_BLOCK. Returns the current top-level
5521 block. */
5523 tree
5524 reorder_blocks (block, insns)
5525 tree block;
5526 rtx insns;
5528 tree current_block = block;
5529 rtx insn;
5531 if (block == NULL_TREE)
5532 return NULL_TREE;
5534 /* Prune the old trees away, so that it doesn't get in the way. */
5535 BLOCK_SUBBLOCKS (current_block) = 0;
5536 BLOCK_CHAIN (current_block) = 0;
5538 for (insn = insns; insn; insn = NEXT_INSN (insn))
5539 if (GET_CODE (insn) == NOTE)
5541 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5543 tree block = NOTE_BLOCK (insn);
5544 /* If we have seen this block before, copy it. */
5545 if (TREE_ASM_WRITTEN (block))
5546 block = copy_node (block);
5547 BLOCK_SUBBLOCKS (block) = 0;
5548 TREE_ASM_WRITTEN (block) = 1;
5549 BLOCK_SUPERCONTEXT (block) = current_block;
5550 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5551 BLOCK_SUBBLOCKS (current_block) = block;
5552 current_block = block;
5553 NOTE_SOURCE_FILE (insn) = 0;
5555 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5557 BLOCK_SUBBLOCKS (current_block)
5558 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5559 current_block = BLOCK_SUPERCONTEXT (current_block);
5560 NOTE_SOURCE_FILE (insn) = 0;
5564 BLOCK_SUBBLOCKS (current_block)
5565 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5566 return current_block;
5569 /* Reverse the order of elements in the chain T of blocks,
5570 and return the new head of the chain (old last element). */
5572 static tree
5573 blocks_nreverse (t)
5574 tree t;
5576 register tree prev = 0, decl, next;
5577 for (decl = t; decl; decl = next)
5579 next = BLOCK_CHAIN (decl);
5580 BLOCK_CHAIN (decl) = prev;
5581 prev = decl;
5583 return prev;
5586 /* Count the subblocks of the list starting with BLOCK, and list them
5587 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5588 blocks. */
5590 static int
5591 all_blocks (block, vector)
5592 tree block;
5593 tree *vector;
5595 int n_blocks = 0;
5597 while (block)
5599 TREE_ASM_WRITTEN (block) = 0;
5601 /* Record this block. */
5602 if (vector)
5603 vector[n_blocks] = block;
5605 ++n_blocks;
5607 /* Record the subblocks, and their subblocks... */
5608 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5609 vector ? vector + n_blocks : 0);
5610 block = BLOCK_CHAIN (block);
5613 return n_blocks;
5616 /* Allocate a function structure and reset its contents to the defaults. */
5617 static void
5618 prepare_function_start ()
5620 current_function = (struct function *) xcalloc (1, sizeof (struct function));
5622 init_stmt_for_function ();
5623 init_eh_for_function ();
5625 cse_not_expected = ! optimize;
5627 /* Caller save not needed yet. */
5628 caller_save_needed = 0;
5630 /* No stack slots have been made yet. */
5631 stack_slot_list = 0;
5633 current_function_has_nonlocal_label = 0;
5634 current_function_has_nonlocal_goto = 0;
5636 /* There is no stack slot for handling nonlocal gotos. */
5637 nonlocal_goto_handler_slots = 0;
5638 nonlocal_goto_stack_level = 0;
5640 /* No labels have been declared for nonlocal use. */
5641 nonlocal_labels = 0;
5642 nonlocal_goto_handler_labels = 0;
5644 /* No function calls so far in this function. */
5645 function_call_count = 0;
5647 /* No parm regs have been allocated.
5648 (This is important for output_inline_function.) */
5649 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5651 /* Initialize the RTL mechanism. */
5652 init_emit ();
5654 /* Initialize the queue of pending postincrement and postdecrements,
5655 and some other info in expr.c. */
5656 init_expr ();
5658 /* We haven't done register allocation yet. */
5659 reg_renumber = 0;
5661 init_varasm_status (current_function);
5663 /* Clear out data used for inlining. */
5664 current_function->inlinable = 0;
5665 current_function->original_decl_initial = 0;
5666 current_function->original_arg_vector = 0;
5668 /* Set if a call to setjmp is seen. */
5669 current_function_calls_setjmp = 0;
5671 /* Set if a call to longjmp is seen. */
5672 current_function_calls_longjmp = 0;
5674 current_function_calls_alloca = 0;
5675 current_function_contains_functions = 0;
5676 current_function_is_leaf = 0;
5677 current_function_sp_is_unchanging = 0;
5678 current_function_uses_only_leaf_regs = 0;
5679 current_function_has_computed_jump = 0;
5680 current_function_is_thunk = 0;
5682 current_function_returns_pcc_struct = 0;
5683 current_function_returns_struct = 0;
5684 current_function_epilogue_delay_list = 0;
5685 current_function_uses_const_pool = 0;
5686 current_function_uses_pic_offset_table = 0;
5687 current_function_cannot_inline = 0;
5689 /* We have not yet needed to make a label to jump to for tail-recursion. */
5690 tail_recursion_label = 0;
5692 /* We haven't had a need to make a save area for ap yet. */
5693 arg_pointer_save_area = 0;
5695 /* No stack slots allocated yet. */
5696 frame_offset = 0;
5698 /* No SAVE_EXPRs in this function yet. */
5699 save_expr_regs = 0;
5701 /* No RTL_EXPRs in this function yet. */
5702 rtl_expr_chain = 0;
5704 /* Set up to allocate temporaries. */
5705 init_temp_slots ();
5707 /* Indicate that we need to distinguish between the return value of the
5708 present function and the return value of a function being called. */
5709 rtx_equal_function_value_matters = 1;
5711 /* Indicate that we have not instantiated virtual registers yet. */
5712 virtuals_instantiated = 0;
5714 /* Indicate we have no need of a frame pointer yet. */
5715 frame_pointer_needed = 0;
5717 /* By default assume not varargs or stdarg. */
5718 current_function_varargs = 0;
5719 current_function_stdarg = 0;
5721 /* We haven't made any trampolines for this function yet. */
5722 trampoline_list = 0;
5724 init_pending_stack_adjust ();
5725 inhibit_defer_pop = 0;
5727 current_function_outgoing_args_size = 0;
5729 if (init_lang_status)
5730 (*init_lang_status) (current_function);
5731 if (init_machine_status)
5732 (*init_machine_status) (current_function);
5735 /* Initialize the rtl expansion mechanism so that we can do simple things
5736 like generate sequences. This is used to provide a context during global
5737 initialization of some passes. */
5738 void
5739 init_dummy_function_start ()
5741 prepare_function_start ();
5744 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5745 and initialize static variables for generating RTL for the statements
5746 of the function. */
5748 void
5749 init_function_start (subr, filename, line)
5750 tree subr;
5751 char *filename;
5752 int line;
5754 prepare_function_start ();
5756 /* Remember this function for later. */
5757 current_function->next_global = all_functions;
5758 all_functions = current_function;
5760 current_function_name = (*decl_printable_name) (subr, 2);
5761 current_function->decl = subr;
5763 /* Nonzero if this is a nested function that uses a static chain. */
5765 current_function_needs_context
5766 = (decl_function_context (current_function_decl) != 0
5767 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5769 /* Within function body, compute a type's size as soon it is laid out. */
5770 immediate_size_expand++;
5772 /* Prevent ever trying to delete the first instruction of a function.
5773 Also tell final how to output a linenum before the function prologue.
5774 Note linenums could be missing, e.g. when compiling a Java .class file. */
5775 if (line > 0)
5776 emit_line_note (filename, line);
5778 /* Make sure first insn is a note even if we don't want linenums.
5779 This makes sure the first insn will never be deleted.
5780 Also, final expects a note to appear there. */
5781 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5783 /* Set flags used by final.c. */
5784 if (aggregate_value_p (DECL_RESULT (subr)))
5786 #ifdef PCC_STATIC_STRUCT_RETURN
5787 current_function_returns_pcc_struct = 1;
5788 #endif
5789 current_function_returns_struct = 1;
5792 /* Warn if this value is an aggregate type,
5793 regardless of which calling convention we are using for it. */
5794 if (warn_aggregate_return
5795 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5796 warning ("function returns an aggregate");
5798 current_function_returns_pointer
5799 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5802 /* Make sure all values used by the optimization passes have sane
5803 defaults. */
5804 void
5805 init_function_for_compilation ()
5807 reg_renumber = 0;
5808 /* No prologue/epilogue insns yet. */
5809 prologue = epilogue = 0;
5812 /* Indicate that the current function uses extra args
5813 not explicitly mentioned in the argument list in any fashion. */
5815 void
5816 mark_varargs ()
5818 current_function_varargs = 1;
5821 /* Expand a call to __main at the beginning of a possible main function. */
5823 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5824 #undef HAS_INIT_SECTION
5825 #define HAS_INIT_SECTION
5826 #endif
5828 void
5829 expand_main_function ()
5831 #if !defined (HAS_INIT_SECTION)
5832 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5833 VOIDmode, 0);
5834 #endif /* not HAS_INIT_SECTION */
5837 extern struct obstack permanent_obstack;
5839 /* Start the RTL for a new function, and set variables used for
5840 emitting RTL.
5841 SUBR is the FUNCTION_DECL node.
5842 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5843 the function's parameters, which must be run at any return statement. */
5845 void
5846 expand_function_start (subr, parms_have_cleanups)
5847 tree subr;
5848 int parms_have_cleanups;
5850 register int i;
5851 tree tem;
5852 rtx last_ptr = NULL_RTX;
5854 /* Make sure volatile mem refs aren't considered
5855 valid operands of arithmetic insns. */
5856 init_recog_no_volatile ();
5858 /* Set this before generating any memory accesses. */
5859 current_function_check_memory_usage
5860 = (flag_check_memory_usage
5861 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5863 current_function_instrument_entry_exit
5864 = (flag_instrument_function_entry_exit
5865 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5867 /* If function gets a static chain arg, store it in the stack frame.
5868 Do this first, so it gets the first stack slot offset. */
5869 if (current_function_needs_context)
5871 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5873 /* Delay copying static chain if it is not a register to avoid
5874 conflicts with regs used for parameters. */
5875 if (! SMALL_REGISTER_CLASSES
5876 || GET_CODE (static_chain_incoming_rtx) == REG)
5877 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5880 /* If the parameters of this function need cleaning up, get a label
5881 for the beginning of the code which executes those cleanups. This must
5882 be done before doing anything with return_label. */
5883 if (parms_have_cleanups)
5884 cleanup_label = gen_label_rtx ();
5885 else
5886 cleanup_label = 0;
5888 /* Make the label for return statements to jump to, if this machine
5889 does not have a one-instruction return and uses an epilogue,
5890 or if it returns a structure, or if it has parm cleanups. */
5891 #ifdef HAVE_return
5892 if (cleanup_label == 0 && HAVE_return
5893 && ! current_function_instrument_entry_exit
5894 && ! current_function_returns_pcc_struct
5895 && ! (current_function_returns_struct && ! optimize))
5896 return_label = 0;
5897 else
5898 return_label = gen_label_rtx ();
5899 #else
5900 return_label = gen_label_rtx ();
5901 #endif
5903 /* Initialize rtx used to return the value. */
5904 /* Do this before assign_parms so that we copy the struct value address
5905 before any library calls that assign parms might generate. */
5907 /* Decide whether to return the value in memory or in a register. */
5908 if (aggregate_value_p (DECL_RESULT (subr)))
5910 /* Returning something that won't go in a register. */
5911 register rtx value_address = 0;
5913 #ifdef PCC_STATIC_STRUCT_RETURN
5914 if (current_function_returns_pcc_struct)
5916 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5917 value_address = assemble_static_space (size);
5919 else
5920 #endif
5922 /* Expect to be passed the address of a place to store the value.
5923 If it is passed as an argument, assign_parms will take care of
5924 it. */
5925 if (struct_value_incoming_rtx)
5927 value_address = gen_reg_rtx (Pmode);
5928 emit_move_insn (value_address, struct_value_incoming_rtx);
5931 if (value_address)
5933 DECL_RTL (DECL_RESULT (subr))
5934 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5935 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5936 AGGREGATE_TYPE_P (TREE_TYPE
5937 (DECL_RESULT
5938 (subr))));
5941 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5942 /* If return mode is void, this decl rtl should not be used. */
5943 DECL_RTL (DECL_RESULT (subr)) = 0;
5944 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5946 /* If function will end with cleanup code for parms,
5947 compute the return values into a pseudo reg,
5948 which we will copy into the true return register
5949 after the cleanups are done. */
5951 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5953 #ifdef PROMOTE_FUNCTION_RETURN
5954 tree type = TREE_TYPE (DECL_RESULT (subr));
5955 int unsignedp = TREE_UNSIGNED (type);
5957 mode = promote_mode (type, mode, &unsignedp, 1);
5958 #endif
5960 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5962 else
5963 /* Scalar, returned in a register. */
5965 #ifdef FUNCTION_OUTGOING_VALUE
5966 DECL_RTL (DECL_RESULT (subr))
5967 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5968 #else
5969 DECL_RTL (DECL_RESULT (subr))
5970 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5971 #endif
5973 /* Mark this reg as the function's return value. */
5974 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5976 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5977 /* Needed because we may need to move this to memory
5978 in case it's a named return value whose address is taken. */
5979 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5983 /* Initialize rtx for parameters and local variables.
5984 In some cases this requires emitting insns. */
5986 assign_parms (subr);
5988 /* Copy the static chain now if it wasn't a register. The delay is to
5989 avoid conflicts with the parameter passing registers. */
5991 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5992 if (GET_CODE (static_chain_incoming_rtx) != REG)
5993 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5995 /* The following was moved from init_function_start.
5996 The move is supposed to make sdb output more accurate. */
5997 /* Indicate the beginning of the function body,
5998 as opposed to parm setup. */
5999 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6001 /* If doing stupid allocation, mark parms as born here. */
6003 if (GET_CODE (get_last_insn ()) != NOTE)
6004 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6005 parm_birth_insn = get_last_insn ();
6007 if (obey_regdecls)
6009 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6010 use_variable (regno_reg_rtx[i]);
6012 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6013 use_variable (current_function_internal_arg_pointer);
6016 context_display = 0;
6017 if (current_function_needs_context)
6019 /* Fetch static chain values for containing functions. */
6020 tem = decl_function_context (current_function_decl);
6021 /* If not doing stupid register allocation copy the static chain
6022 pointer into a pseudo. If we have small register classes, copy
6023 the value from memory if static_chain_incoming_rtx is a REG. If
6024 we do stupid register allocation, we use the stack address
6025 generated above. */
6026 if (tem && ! obey_regdecls)
6028 /* If the static chain originally came in a register, put it back
6029 there, then move it out in the next insn. The reason for
6030 this peculiar code is to satisfy function integration. */
6031 if (SMALL_REGISTER_CLASSES
6032 && GET_CODE (static_chain_incoming_rtx) == REG)
6033 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6034 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6037 while (tem)
6039 tree rtlexp = make_node (RTL_EXPR);
6041 RTL_EXPR_RTL (rtlexp) = last_ptr;
6042 context_display = tree_cons (tem, rtlexp, context_display);
6043 tem = decl_function_context (tem);
6044 if (tem == 0)
6045 break;
6046 /* Chain thru stack frames, assuming pointer to next lexical frame
6047 is found at the place we always store it. */
6048 #ifdef FRAME_GROWS_DOWNWARD
6049 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6050 #endif
6051 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6052 memory_address (Pmode,
6053 last_ptr)));
6055 /* If we are not optimizing, ensure that we know that this
6056 piece of context is live over the entire function. */
6057 if (! optimize)
6058 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6059 save_expr_regs);
6063 if (current_function_instrument_entry_exit)
6065 rtx fun = DECL_RTL (current_function_decl);
6066 if (GET_CODE (fun) == MEM)
6067 fun = XEXP (fun, 0);
6068 else
6069 abort ();
6070 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6071 fun, Pmode,
6072 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6074 hard_frame_pointer_rtx),
6075 Pmode);
6078 /* After the display initializations is where the tail-recursion label
6079 should go, if we end up needing one. Ensure we have a NOTE here
6080 since some things (like trampolines) get placed before this. */
6081 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6083 /* Evaluate now the sizes of any types declared among the arguments. */
6084 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6086 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6087 EXPAND_MEMORY_USE_BAD);
6088 /* Flush the queue in case this parameter declaration has
6089 side-effects. */
6090 emit_queue ();
6093 /* Make sure there is a line number after the function entry setup code. */
6094 force_next_line_note ();
6097 /* Undo the effects of init_dummy_function_start. */
6098 void
6099 expand_dummy_function_end ()
6101 /* End any sequences that failed to be closed due to syntax errors. */
6102 while (in_sequence_p ())
6103 end_sequence ();
6105 /* Outside function body, can't compute type's actual size
6106 until next function's body starts. */
6108 free_after_parsing (current_function);
6109 free_after_compilation (current_function);
6110 free (current_function);
6111 current_function = 0;
6114 /* Generate RTL for the end of the current function.
6115 FILENAME and LINE are the current position in the source file.
6117 It is up to language-specific callers to do cleanups for parameters--
6118 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6120 void
6121 expand_function_end (filename, line, end_bindings)
6122 char *filename;
6123 int line;
6124 int end_bindings;
6126 register int i;
6127 tree link;
6129 #ifdef TRAMPOLINE_TEMPLATE
6130 static rtx initial_trampoline;
6131 #endif
6133 finish_expr_for_function ();
6135 #ifdef NON_SAVING_SETJMP
6136 /* Don't put any variables in registers if we call setjmp
6137 on a machine that fails to restore the registers. */
6138 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6140 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6141 setjmp_protect (DECL_INITIAL (current_function_decl));
6143 setjmp_protect_args ();
6145 #endif
6147 /* Save the argument pointer if a save area was made for it. */
6148 if (arg_pointer_save_area)
6150 /* arg_pointer_save_area may not be a valid memory address, so we
6151 have to check it and fix it if necessary. */
6152 rtx seq;
6153 start_sequence ();
6154 emit_move_insn (validize_mem (arg_pointer_save_area),
6155 virtual_incoming_args_rtx);
6156 seq = gen_sequence ();
6157 end_sequence ();
6158 emit_insn_before (seq, tail_recursion_reentry);
6161 /* Initialize any trampolines required by this function. */
6162 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6164 tree function = TREE_PURPOSE (link);
6165 rtx context = lookup_static_chain (function);
6166 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6167 #ifdef TRAMPOLINE_TEMPLATE
6168 rtx blktramp;
6169 #endif
6170 rtx seq;
6172 #ifdef TRAMPOLINE_TEMPLATE
6173 /* First make sure this compilation has a template for
6174 initializing trampolines. */
6175 if (initial_trampoline == 0)
6177 end_temporary_allocation ();
6178 initial_trampoline
6179 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6180 resume_temporary_allocation ();
6182 ggc_add_rtx_root (&initial_trampoline, 1);
6184 #endif
6186 /* Generate insns to initialize the trampoline. */
6187 start_sequence ();
6188 tramp = round_trampoline_addr (XEXP (tramp, 0));
6189 #ifdef TRAMPOLINE_TEMPLATE
6190 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6191 emit_block_move (blktramp, initial_trampoline,
6192 GEN_INT (TRAMPOLINE_SIZE),
6193 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6194 #endif
6195 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6196 seq = get_insns ();
6197 end_sequence ();
6199 /* Put those insns at entry to the containing function (this one). */
6200 emit_insns_before (seq, tail_recursion_reentry);
6203 /* If we are doing stack checking and this function makes calls,
6204 do a stack probe at the start of the function to ensure we have enough
6205 space for another stack frame. */
6206 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6208 rtx insn, seq;
6210 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6211 if (GET_CODE (insn) == CALL_INSN)
6213 start_sequence ();
6214 probe_stack_range (STACK_CHECK_PROTECT,
6215 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6216 seq = get_insns ();
6217 end_sequence ();
6218 emit_insns_before (seq, tail_recursion_reentry);
6219 break;
6223 /* Warn about unused parms if extra warnings were specified. */
6224 if (warn_unused && extra_warnings)
6226 tree decl;
6228 for (decl = DECL_ARGUMENTS (current_function_decl);
6229 decl; decl = TREE_CHAIN (decl))
6230 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6231 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6232 warning_with_decl (decl, "unused parameter `%s'");
6235 /* Delete handlers for nonlocal gotos if nothing uses them. */
6236 if (nonlocal_goto_handler_slots != 0
6237 && ! current_function_has_nonlocal_label)
6238 delete_handlers ();
6240 /* End any sequences that failed to be closed due to syntax errors. */
6241 while (in_sequence_p ())
6242 end_sequence ();
6244 /* Outside function body, can't compute type's actual size
6245 until next function's body starts. */
6246 immediate_size_expand--;
6248 /* If doing stupid register allocation,
6249 mark register parms as dying here. */
6251 if (obey_regdecls)
6253 rtx tem;
6254 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6255 use_variable (regno_reg_rtx[i]);
6257 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6259 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6261 use_variable (XEXP (tem, 0));
6262 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6265 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6266 use_variable (current_function_internal_arg_pointer);
6269 clear_pending_stack_adjust ();
6270 do_pending_stack_adjust ();
6272 /* Mark the end of the function body.
6273 If control reaches this insn, the function can drop through
6274 without returning a value. */
6275 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6277 /* Must mark the last line number note in the function, so that the test
6278 coverage code can avoid counting the last line twice. This just tells
6279 the code to ignore the immediately following line note, since there
6280 already exists a copy of this note somewhere above. This line number
6281 note is still needed for debugging though, so we can't delete it. */
6282 if (flag_test_coverage)
6283 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6285 /* Output a linenumber for the end of the function.
6286 SDB depends on this. */
6287 emit_line_note_force (filename, line);
6289 /* Output the label for the actual return from the function,
6290 if one is expected. This happens either because a function epilogue
6291 is used instead of a return instruction, or because a return was done
6292 with a goto in order to run local cleanups, or because of pcc-style
6293 structure returning. */
6295 if (return_label)
6296 emit_label (return_label);
6298 /* C++ uses this. */
6299 if (end_bindings)
6300 expand_end_bindings (0, 0, 0);
6302 /* Now handle any leftover exception regions that may have been
6303 created for the parameters. */
6305 rtx last = get_last_insn ();
6306 rtx label;
6308 expand_leftover_cleanups ();
6310 /* If the above emitted any code, may sure we jump around it. */
6311 if (last != get_last_insn ())
6313 label = gen_label_rtx ();
6314 last = emit_jump_insn_after (gen_jump (label), last);
6315 last = emit_barrier_after (last);
6316 emit_label (label);
6320 if (current_function_instrument_entry_exit)
6322 rtx fun = DECL_RTL (current_function_decl);
6323 if (GET_CODE (fun) == MEM)
6324 fun = XEXP (fun, 0);
6325 else
6326 abort ();
6327 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6328 fun, Pmode,
6329 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6331 hard_frame_pointer_rtx),
6332 Pmode);
6335 /* If we had calls to alloca, and this machine needs
6336 an accurate stack pointer to exit the function,
6337 insert some code to save and restore the stack pointer. */
6338 #ifdef EXIT_IGNORE_STACK
6339 if (! EXIT_IGNORE_STACK)
6340 #endif
6341 if (current_function_calls_alloca)
6343 rtx tem = 0;
6345 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6346 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6349 /* If scalar return value was computed in a pseudo-reg,
6350 copy that to the hard return register. */
6351 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6352 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6353 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6354 >= FIRST_PSEUDO_REGISTER))
6356 rtx real_decl_result;
6358 #ifdef FUNCTION_OUTGOING_VALUE
6359 real_decl_result
6360 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6361 current_function_decl);
6362 #else
6363 real_decl_result
6364 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6365 current_function_decl);
6366 #endif
6367 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6368 /* If this is a BLKmode structure being returned in registers, then use
6369 the mode computed in expand_return. */
6370 if (GET_MODE (real_decl_result) == BLKmode)
6371 PUT_MODE (real_decl_result,
6372 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6373 emit_move_insn (real_decl_result,
6374 DECL_RTL (DECL_RESULT (current_function_decl)));
6375 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6377 /* The delay slot scheduler assumes that current_function_return_rtx
6378 holds the hard register containing the return value, not a temporary
6379 pseudo. */
6380 current_function_return_rtx = real_decl_result;
6383 /* If returning a structure, arrange to return the address of the value
6384 in a place where debuggers expect to find it.
6386 If returning a structure PCC style,
6387 the caller also depends on this value.
6388 And current_function_returns_pcc_struct is not necessarily set. */
6389 if (current_function_returns_struct
6390 || current_function_returns_pcc_struct)
6392 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6393 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6394 #ifdef FUNCTION_OUTGOING_VALUE
6395 rtx outgoing
6396 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6397 current_function_decl);
6398 #else
6399 rtx outgoing
6400 = FUNCTION_VALUE (build_pointer_type (type),
6401 current_function_decl);
6402 #endif
6404 /* Mark this as a function return value so integrate will delete the
6405 assignment and USE below when inlining this function. */
6406 REG_FUNCTION_VALUE_P (outgoing) = 1;
6408 emit_move_insn (outgoing, value_address);
6409 use_variable (outgoing);
6412 /* If this is an implementation of __throw, do what's necessary to
6413 communicate between __builtin_eh_return and the epilogue. */
6414 expand_eh_return ();
6416 /* Output a return insn if we are using one.
6417 Otherwise, let the rtl chain end here, to drop through
6418 into the epilogue. */
6420 #ifdef HAVE_return
6421 if (HAVE_return)
6423 emit_jump_insn (gen_return ());
6424 emit_barrier ();
6426 #endif
6428 /* Fix up any gotos that jumped out to the outermost
6429 binding level of the function.
6430 Must follow emitting RETURN_LABEL. */
6432 /* If you have any cleanups to do at this point,
6433 and they need to create temporary variables,
6434 then you will lose. */
6435 expand_fixups (get_insns ());
6438 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6439 or a single insn). */
6441 static int *
6442 record_insns (insns)
6443 rtx insns;
6445 int *vec;
6447 if (GET_CODE (insns) == SEQUENCE)
6449 int len = XVECLEN (insns, 0);
6450 vec = (int *) oballoc ((len + 1) * sizeof (int));
6451 vec[len] = 0;
6452 while (--len >= 0)
6453 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6455 else
6457 vec = (int *) oballoc (2 * sizeof (int));
6458 vec[0] = INSN_UID (insns);
6459 vec[1] = 0;
6461 return vec;
6464 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6466 static int
6467 contains (insn, vec)
6468 rtx insn;
6469 int *vec;
6471 register int i, j;
6473 if (GET_CODE (insn) == INSN
6474 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6476 int count = 0;
6477 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6478 for (j = 0; vec[j]; j++)
6479 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6480 count++;
6481 return count;
6483 else
6485 for (j = 0; vec[j]; j++)
6486 if (INSN_UID (insn) == vec[j])
6487 return 1;
6489 return 0;
6493 prologue_epilogue_contains (insn)
6494 rtx insn;
6496 if (prologue && contains (insn, prologue))
6497 return 1;
6498 if (epilogue && contains (insn, epilogue))
6499 return 1;
6500 return 0;
6503 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6504 this into place with notes indicating where the prologue ends and where
6505 the epilogue begins. Update the basic block information when possible. */
6507 void
6508 thread_prologue_and_epilogue_insns (f)
6509 rtx f ATTRIBUTE_UNUSED;
6511 int insertted = 0;
6513 #ifdef HAVE_prologue
6514 if (HAVE_prologue)
6516 rtx seq;
6518 start_sequence ();
6519 seq = gen_prologue();
6520 emit_insn (seq);
6522 /* Retain a map of the prologue insns. */
6523 if (GET_CODE (seq) != SEQUENCE)
6524 seq = get_insns ();
6525 prologue = record_insns (seq);
6527 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6528 seq = gen_sequence ();
6529 end_sequence ();
6531 /* If optimization is off, and perhaps in an empty function,
6532 the entry block will have no successors. */
6533 if (ENTRY_BLOCK_PTR->succ)
6535 /* Can't deal with multiple successsors of the entry block. */
6536 if (ENTRY_BLOCK_PTR->succ->succ_next)
6537 abort ();
6539 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6540 insertted = 1;
6542 else
6543 emit_insn_after (seq, f);
6545 #endif
6547 #ifdef HAVE_epilogue
6548 if (HAVE_epilogue)
6550 edge e;
6551 basic_block bb = 0;
6552 rtx tail = get_last_insn ();
6554 /* ??? This is gastly. If function returns were not done via uses,
6555 but via mark_regs_live_at_end, we could use insert_insn_on_edge
6556 and all of this uglyness would go away. */
6558 switch (optimize)
6560 default:
6561 /* If the exit block has no non-fake predecessors, we don't
6562 need an epilogue. Furthermore, only pay attention to the
6563 fallthru predecessors; if (conditional) return insns were
6564 generated, by definition we do not need to emit epilogue
6565 insns. */
6567 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6568 if ((e->flags & EDGE_FAKE) == 0
6569 && (e->flags & EDGE_FALLTHRU) != 0)
6570 break;
6571 if (e == NULL)
6572 break;
6574 /* We can't handle multiple epilogues -- if one is needed,
6575 we won't be able to place it multiple times.
6577 ??? Fix epilogue expanders to not assume they are the
6578 last thing done compiling the function. Either that
6579 or copy_rtx each insn.
6581 ??? Blah, it's not a simple expression to assert that
6582 we've exactly one fallthru exit edge. */
6584 bb = e->src;
6585 tail = bb->end;
6587 /* ??? If the last insn of the basic block is a jump, then we
6588 are creating a new basic block. Wimp out and leave these
6589 insns outside any block. */
6590 if (GET_CODE (tail) == JUMP_INSN)
6591 bb = 0;
6593 /* FALLTHRU */
6594 case 0:
6596 rtx prev, seq, first_use;
6598 /* Move the USE insns at the end of a function onto a list. */
6599 prev = tail;
6600 if (GET_CODE (prev) == BARRIER
6601 || GET_CODE (prev) == NOTE)
6602 prev = prev_nonnote_insn (prev);
6604 first_use = 0;
6605 if (prev
6606 && GET_CODE (prev) == INSN
6607 && GET_CODE (PATTERN (prev)) == USE)
6609 /* If the end of the block is the use, grab hold of something
6610 else so that we emit barriers etc in the right place. */
6611 if (prev == tail)
6614 tail = PREV_INSN (tail);
6615 while (GET_CODE (tail) == INSN
6616 && GET_CODE (PATTERN (tail)) == USE);
6621 rtx use = prev;
6622 prev = prev_nonnote_insn (prev);
6624 remove_insn (use);
6625 if (first_use)
6627 NEXT_INSN (use) = first_use;
6628 PREV_INSN (first_use) = use;
6630 else
6631 NEXT_INSN (use) = NULL_RTX;
6632 first_use = use;
6634 while (prev
6635 && GET_CODE (prev) == INSN
6636 && GET_CODE (PATTERN (prev)) == USE);
6639 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6640 epilogue insns, the USE insns at the end of a function,
6641 the jump insn that returns, and then a BARRIER. */
6643 if (GET_CODE (tail) != BARRIER)
6645 prev = next_nonnote_insn (tail);
6646 if (!prev || GET_CODE (prev) != BARRIER)
6647 emit_barrier_after (tail);
6650 seq = gen_epilogue ();
6651 prev = tail;
6652 tail = emit_jump_insn_after (seq, tail);
6654 /* Insert the USE insns immediately before the return insn, which
6655 must be the last instruction emitted in the sequence. */
6656 if (first_use)
6657 emit_insns_before (first_use, tail);
6658 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6660 /* Update the tail of the basic block. */
6661 if (bb)
6662 bb->end = tail;
6664 /* Retain a map of the epilogue insns. */
6665 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6669 #endif
6671 if (insertted)
6672 commit_edge_insertions ();
6675 /* Reposition the prologue-end and epilogue-begin notes after instruction
6676 scheduling and delayed branch scheduling. */
6678 void
6679 reposition_prologue_and_epilogue_notes (f)
6680 rtx f ATTRIBUTE_UNUSED;
6682 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6683 /* Reposition the prologue and epilogue notes. */
6684 if (n_basic_blocks)
6686 int len;
6688 if (prologue)
6690 register rtx insn, note = 0;
6692 /* Scan from the beginning until we reach the last prologue insn.
6693 We apparently can't depend on basic_block_{head,end} after
6694 reorg has run. */
6695 for (len = 0; prologue[len]; len++)
6697 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6699 if (GET_CODE (insn) == NOTE)
6701 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6702 note = insn;
6704 else if ((len -= contains (insn, prologue)) == 0)
6706 rtx next;
6707 /* Find the prologue-end note if we haven't already, and
6708 move it to just after the last prologue insn. */
6709 if (note == 0)
6711 for (note = insn; (note = NEXT_INSN (note));)
6712 if (GET_CODE (note) == NOTE
6713 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6714 break;
6717 next = NEXT_INSN (note);
6719 /* Whether or not we can depend on BLOCK_HEAD,
6720 attempt to keep it up-to-date. */
6721 if (BLOCK_HEAD (0) == note)
6722 BLOCK_HEAD (0) = next;
6724 remove_insn (note);
6725 add_insn_after (note, insn);
6730 if (epilogue)
6732 register rtx insn, note = 0;
6734 /* Scan from the end until we reach the first epilogue insn.
6735 We apparently can't depend on basic_block_{head,end} after
6736 reorg has run. */
6737 for (len = 0; epilogue[len]; len++)
6739 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6741 if (GET_CODE (insn) == NOTE)
6743 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6744 note = insn;
6746 else if ((len -= contains (insn, epilogue)) == 0)
6748 /* Find the epilogue-begin note if we haven't already, and
6749 move it to just before the first epilogue insn. */
6750 if (note == 0)
6752 for (note = insn; (note = PREV_INSN (note));)
6753 if (GET_CODE (note) == NOTE
6754 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6755 break;
6758 /* Whether or not we can depend on BLOCK_HEAD,
6759 attempt to keep it up-to-date. */
6760 if (n_basic_blocks
6761 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6762 BLOCK_HEAD (n_basic_blocks-1) = note;
6764 remove_insn (note);
6765 add_insn_before (note, insn);
6770 #endif /* HAVE_prologue or HAVE_epilogue */
6773 /* Mark T for GC. */
6775 static void
6776 mark_temp_slot (t)
6777 struct temp_slot *t;
6779 while (t)
6781 ggc_mark_rtx (t->slot);
6782 ggc_mark_rtx (t->address);
6783 ggc_mark_tree (t->rtl_expr);
6785 t = t->next;
6789 /* Mark P for GC. */
6791 static void
6792 mark_function_status (p)
6793 struct function *p;
6795 int i;
6796 rtx *r;
6798 if (p == 0)
6799 return;
6801 ggc_mark_rtx (p->arg_offset_rtx);
6803 if (p->x_parm_reg_stack_loc)
6804 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
6805 i > 0; --i, ++r)
6806 ggc_mark_rtx (*r);
6808 ggc_mark_rtx (p->return_rtx);
6809 ggc_mark_rtx (p->x_cleanup_label);
6810 ggc_mark_rtx (p->x_return_label);
6811 ggc_mark_rtx (p->x_save_expr_regs);
6812 ggc_mark_rtx (p->x_stack_slot_list);
6813 ggc_mark_rtx (p->x_parm_birth_insn);
6814 ggc_mark_rtx (p->x_tail_recursion_label);
6815 ggc_mark_rtx (p->x_tail_recursion_reentry);
6816 ggc_mark_rtx (p->internal_arg_pointer);
6817 ggc_mark_rtx (p->x_arg_pointer_save_area);
6818 ggc_mark_tree (p->x_rtl_expr_chain);
6819 ggc_mark_rtx (p->x_last_parm_insn);
6820 ggc_mark_tree (p->x_context_display);
6821 ggc_mark_tree (p->x_trampoline_list);
6822 ggc_mark_rtx (p->epilogue_delay_list);
6824 mark_temp_slot (p->x_temp_slots);
6827 struct var_refs_queue *q = p->fixup_var_refs_queue;
6828 while (q)
6830 ggc_mark_rtx (q->modified);
6831 q = q->next;
6835 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
6836 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
6837 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
6838 ggc_mark_tree (p->x_nonlocal_labels);
6841 /* Mark the function chain ARG (which is really a struct function **)
6842 for GC. */
6844 static void
6845 mark_function_chain (arg)
6846 void *arg;
6848 struct function *f = *(struct function **) arg;
6850 for (; f; f = f->next_global)
6852 ggc_mark_tree (f->decl);
6854 mark_function_status (f);
6855 mark_eh_status (f->eh);
6856 mark_stmt_status (f->stmt);
6857 mark_expr_status (f->expr);
6858 mark_emit_status (f->emit);
6859 mark_varasm_status (f->varasm);
6861 if (mark_machine_status)
6862 (*mark_machine_status) (f);
6863 if (mark_lang_status)
6864 (*mark_lang_status) (f);
6866 if (f->original_arg_vector)
6867 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
6868 if (f->original_decl_initial)
6869 ggc_mark_tree (f->original_decl_initial);
6873 /* Called once, at initialization, to initialize function.c. */
6875 void
6876 init_function_once ()
6878 ggc_add_root (&all_functions, 1, sizeof all_functions,
6879 mark_function_chain);