allow all arm targets to use -mstructure-size-boundary=XX
[official-gcc.git] / gcc / function.c
blob 890f62bdbae8b56ef74de6cba9042529e5e66eca
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
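
/* Illustrative examples (editor's note, not in the original source):
   with ALIGN a power of two,

     CEIL_ROUND (13, 8)   == 16
     FLOOR_ROUND (13, 8)  == 8
     FLOOR_ROUND (-13, 8) == -16

   The last case shows why the macros use masking instead of division:
   division with a negative dividend does not reliably round toward
   negative infinity, which is what a downward-growing frame needs.  */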
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;
/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PROTO((struct function *));
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));
void (*mark_machine_status) PROTO((struct function *));
void (*free_machine_status) PROTO((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PROTO((struct function *));
void (*save_lang_status) PROTO((struct function *));
void (*restore_lang_status) PROTO((struct function *));
void (*mark_lang_status) PROTO((struct function *));
void (*free_lang_status) PROTO((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *current_function = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static int *prologue;
static int *epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
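
/* Illustrative example (editor's note, not in the original source):
   when expanding a statement such as

     struct S s = f ();

   where f returns a struct in memory, a temp slot is allocated at the
   current nesting level to hold f's return value.  Once the statement
   has been expanded, free_temp_slots releases that slot for reuse; the
   slot for `s' itself is marked "kept" and survives.  */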
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
					int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
					      int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
				      enum machine_mode, enum machine_mode,
				      int, int, int,
				      struct hash_table *));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
				  struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int, struct hash_table *));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
				    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
			     tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static int *record_insns PROTO((rtx)) ATTRIBUTE_UNUSED;
static int contains PROTO((rtx, int *));
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
				     struct hash_table *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
						       struct hash_table *,
						       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PROTO ((struct temp_slot *));
static void mark_function_status PROTO ((struct function *));
static void mark_function_chain PROTO ((void *));
static void prepare_function_start PROTO ((void));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
		      ? current_function
		      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (current_function == 0)
    init_dummy_function_start ();
  p = current_function;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  current_function = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  current_function = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
		      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  free (f->x_parm_reg_stack_loc);

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->x_temp_slots = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (current_function);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  if (function != current_function)
    push_obstacks (function->function_obstack,
		   function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == current_function && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  if (function != current_function)
    pop_obstacks ();

  return x;
}
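
/* Editor's note -- illustrative usage, not part of the original source:
   a caller that needs a word-sized scratch slot in the current frame
   might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where the 0 asks for alignment according to SImode.  The returned MEM
   is addressed relative to the virtual stack-variables register until
   virtual registers are instantiated.  */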
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */
rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, current_function);
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */
static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (!flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
	  /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
	  && !flag_strict_aliasing)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
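
/* Editor's note -- illustrative usage, not part of the original source:
   expanding a call that returns an aggregate in memory typically
   allocates the return area with something like

     rtx target = assign_temp (TREE_TYPE (call_expr), 0, 1, 1);

   MEMORY_REQUIRED == 1 forces a stack slot even for small modes, and
   DONT_PROMOTE == 1 keeps the declared mode.  */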
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
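
/* Editor's note -- illustration, not part of the original source:
   if a temp slot lives at (plus virtual_stack_vars_rtx 16) and a pass
   copies that address into a pseudo R, calling

     update_temp_slot_address (old_addr, R);

   records R as an alias, so find_temp_slot_from_address can still
   identify the slot when it later sees R instead of the PLUS.  */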
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}
/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
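
/* Editor's note -- illustrative pairing, not part of the original source.
   A typical caller brackets expansion of a nested construct with:

     push_temp_slots ();
     ... expand the construct, allocating temporaries ...
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   Temporaries made inside the construct are freed on exit unless the
   result value may live in one, in which case preserve_temp_slots
   promotes that slot to the enclosing level first.  */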
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			    0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), Pmode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
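
/* Editor's note -- illustrative trigger, not part of the original source.
   Compiling

     int f (void) { int i = 0; int *p = &i; return *p; }

   initially gives `i' a pseudo register; when `&i' is seen,
   put_var_into_stack is called for `i'.  When optimizing it may instead
   wrap the pseudo in an ADDRESSOF (see can_use_addressof above), which
   can later be purged if the address turns out not to be needed.  */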
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : current_function;
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];
  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
				0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			0, 0);
  end_sequence ();
}
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Remember the notes in case we delete the insn.  */
	  note = REG_NOTES (insn);

	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     if it is not can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  while (note)
	    {
	      if (GET_CODE (note) != INSN_LIST)
		XEXP (note, 0)
		  = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	      note = XEXP (note, 1);
	    }
	}

      if (!ht)
	insn = next;
      else if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = NULL_RTX;
    }
}
1709 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1710 See if the rtx expression at *LOC in INSN needs to be changed.
1712 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1713 contain a list of original rtx's and replacements. If we find that we need
1714 to modify this insn by replacing a memory reference with a pseudo or by
1715 making a new MEM to implement a SUBREG, we consult that list to see if
1716 we have already chosen a replacement. If none has already been allocated,
1717 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1718 or the SUBREG, as appropriate, to the pseudo. */
1720 static void
1721 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1722 register rtx var;
1723 enum machine_mode promoted_mode;
1724 register rtx *loc;
1725 rtx insn;
1726 struct fixup_replacement **replacements;
1728 register int i;
1729 register rtx x = *loc;
1730 RTX_CODE code = GET_CODE (x);
1731 register const char *fmt;
1732 register rtx tem, tem1;
1733 struct fixup_replacement *replacement;
1735 switch (code)
1737 case ADDRESSOF:
1738 if (XEXP (x, 0) == var)
1740 /* Prevent sharing of rtl that might lose. */
1741 rtx sub = copy_rtx (XEXP (var, 0));
1743 if (! validate_change (insn, loc, sub, 0))
1745 rtx y = gen_reg_rtx (GET_MODE (sub));
1746 rtx seq, new_insn;
1748 /* We should be able to replace with a register or all is lost.
1749 Note that we can't use validate_change to verify this, since
1750 we're not caring for replacing all dups simultaneously. */
1751 if (! validate_replace_rtx (*loc, y, insn))
1752 abort ();
1754 /* Careful! First try to recognize a direct move of the
1755 value, mimicking how things are done in gen_reload wrt
1756 PLUS. Consider what happens when insn is a conditional
1757 move instruction and addsi3 clobbers flags. */
1759 start_sequence ();
1760 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1761 seq = gen_sequence ();
1762 end_sequence ();
1764 if (recog_memoized (new_insn) < 0)
1766 /* That failed. Fall back on force_operand and hope. */
1768 start_sequence ();
1769 force_operand (sub, y);
1770 seq = gen_sequence ();
1771 end_sequence ();
1774 #ifdef HAVE_cc0
1775 /* Don't separate setter from user. */
1776 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1777 insn = PREV_INSN (insn);
1778 #endif
1780 emit_insn_before (seq, insn);
1783 return;
1785 case MEM:
1786 if (var == x)
1788 /* If we already have a replacement, use it. Otherwise,
1789 try to fix up this address in case it is invalid. */
1791 replacement = find_fixup_replacement (replacements, var);
1792 if (replacement->new)
1794 *loc = replacement->new;
1795 return;
1798 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1800 /* Unless we are forcing memory to register or we changed the mode,
1801 we can leave things the way they are if the insn is valid. */
1803 INSN_CODE (insn) = -1;
1804 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1805 && recog_memoized (insn) >= 0)
1806 return;
1808 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1809 return;
1812 /* If X contains VAR, we need to unshare it here so that we update
1813 each occurrence separately. But all identical MEMs in one insn
1814 must be replaced with the same rtx because of the possibility of
1815 MATCH_DUPs. */
1817 if (reg_mentioned_p (var, x))
1819 replacement = find_fixup_replacement (replacements, x);
1820 if (replacement->new == 0)
1821 replacement->new = copy_most_rtx (x, var);
1823 *loc = x = replacement->new;
1825 break;
1827 case REG:
1828 case CC0:
1829 case PC:
1830 case CONST_INT:
1831 case CONST:
1832 case SYMBOL_REF:
1833 case LABEL_REF:
1834 case CONST_DOUBLE:
1835 return;
1837 case SIGN_EXTRACT:
1838 case ZERO_EXTRACT:
1839 /* Note that in some cases those types of expressions are altered
1840 by optimize_bit_field, and do not survive to get here. */
1841 if (XEXP (x, 0) == var
1842 || (GET_CODE (XEXP (x, 0)) == SUBREG
1843 && SUBREG_REG (XEXP (x, 0)) == var))
1845 /* Get TEM as a valid MEM in the mode presently in the insn.
1847 We don't worry about the possibility of MATCH_DUP here; it
1848 is highly unlikely and would be tricky to handle. */
1850 tem = XEXP (x, 0);
1851 if (GET_CODE (tem) == SUBREG)
1853 if (GET_MODE_BITSIZE (GET_MODE (tem))
1854 > GET_MODE_BITSIZE (GET_MODE (var)))
1856 replacement = find_fixup_replacement (replacements, var);
1857 if (replacement->new == 0)
1858 replacement->new = gen_reg_rtx (GET_MODE (var));
1859 SUBREG_REG (tem) = replacement->new;
1861 else
1862 tem = fixup_memory_subreg (tem, insn, 0);
1864 else
1865 tem = fixup_stack_1 (tem, insn);
1867 /* Unless we want to load from memory, get TEM into the proper mode
1868 for an extract from memory. This can only be done if the
1869 extract is at a constant position and length. */
1871 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1872 && GET_CODE (XEXP (x, 2)) == CONST_INT
1873 && ! mode_dependent_address_p (XEXP (tem, 0))
1874 && ! MEM_VOLATILE_P (tem))
1876 enum machine_mode wanted_mode = VOIDmode;
1877 enum machine_mode is_mode = GET_MODE (tem);
1878 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1880 #ifdef HAVE_extzv
1881 if (GET_CODE (x) == ZERO_EXTRACT)
1883 wanted_mode
1884 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1885 if (wanted_mode == VOIDmode)
1886 wanted_mode = word_mode;
1888 #endif
1889 #ifdef HAVE_extv
1890 if (GET_CODE (x) == SIGN_EXTRACT)
1892 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1893 if (wanted_mode == VOIDmode)
1894 wanted_mode = word_mode;
1896 #endif
1897 /* If we have a narrower mode, we can do something. */
1898 if (wanted_mode != VOIDmode
1899 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1901 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1902 rtx old_pos = XEXP (x, 2);
1903 rtx newmem;
1905 /* If the bytes and bits are counted differently, we
1906 must adjust the offset. */
1907 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1908 offset = (GET_MODE_SIZE (is_mode)
1909 - GET_MODE_SIZE (wanted_mode) - offset);
1911 pos %= GET_MODE_BITSIZE (wanted_mode);
1913 newmem = gen_rtx_MEM (wanted_mode,
1914 plus_constant (XEXP (tem, 0), offset));
1915 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1916 MEM_COPY_ATTRIBUTES (newmem, tem);
1918 /* Make the change and see if the insn remains valid. */
1919 INSN_CODE (insn) = -1;
1920 XEXP (x, 0) = newmem;
1921 XEXP (x, 2) = GEN_INT (pos);
1923 if (recog_memoized (insn) >= 0)
1924 return;
1926 /* Otherwise, restore old position. XEXP (x, 0) will be
1927 restored later. */
1928 XEXP (x, 2) = old_pos;
1932 /* If we get here, the bitfield extract insn can't accept a memory
1933 reference. Copy the input into a register. */
1935 tem1 = gen_reg_rtx (GET_MODE (tem));
1936 emit_insn_before (gen_move_insn (tem1, tem), insn);
1937 XEXP (x, 0) = tem1;
1938 return;
1940 break;
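/* A worked instance of the narrowing above, with hypothetical modes:
   extracting 8 bits at bit position 8 from a (mem:SI ...) when the
   extv/extzv pattern prefers QImode gives offset = 8/8 = 1; on a
   target where BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN this flips to
   offset = 4 - 1 - 1 = 2, and pos becomes 8 % 8 = 0, so the insn is
   rewritten to extract bit 0 of the QImode byte at that offset.  */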
1942 case SUBREG:
1943 if (SUBREG_REG (x) == var)
1945 /* If this is a special SUBREG made because VAR was promoted
1946 from a wider mode, replace it with VAR and call ourselves
1947 recursively, this time saying that the object previously
1948 had its current mode (by virtue of the SUBREG). */
1950 if (SUBREG_PROMOTED_VAR_P (x))
1952 *loc = var;
1953 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1954 return;
1957 /* If this SUBREG makes VAR wider, it has become a paradoxical
1958 SUBREG with VAR in memory, but these aren't allowed at this
1959 stage of the compilation. So load VAR into a pseudo and take
1960 a SUBREG of that pseudo. */
1961 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1963 replacement = find_fixup_replacement (replacements, var);
1964 if (replacement->new == 0)
1965 replacement->new = gen_reg_rtx (GET_MODE (var));
1966 SUBREG_REG (x) = replacement->new;
1967 return;
1970 /* See if we have already found a replacement for this SUBREG.
1971 If so, use it. Otherwise, make a MEM and see if the insn
1972 is recognized. If not, or if we should force MEM into a register,
1973 make a pseudo for this SUBREG. */
1974 replacement = find_fixup_replacement (replacements, x);
1975 if (replacement->new)
1977 *loc = replacement->new;
1978 return;
1981 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1983 INSN_CODE (insn) = -1;
1984 if (! flag_force_mem && recog_memoized (insn) >= 0)
1985 return;
1987 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1988 return;
1990 break;
1992 case SET:
1993 /* First do special simplification of bit-field references. */
1994 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1995 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1996 optimize_bit_field (x, insn, 0);
1997 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1998 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1999 optimize_bit_field (x, insn, NULL_PTR);
2001 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2002 into a register and then store it back out. */
2003 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2004 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2005 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2006 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2007 > GET_MODE_SIZE (GET_MODE (var))))
2009 replacement = find_fixup_replacement (replacements, var);
2010 if (replacement->new == 0)
2011 replacement->new = gen_reg_rtx (GET_MODE (var));
2013 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2014 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2017 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2018 insn into a pseudo and store the low part of the pseudo into VAR. */
2019 if (GET_CODE (SET_DEST (x)) == SUBREG
2020 && SUBREG_REG (SET_DEST (x)) == var
2021 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2022 > GET_MODE_SIZE (GET_MODE (var))))
2024 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2025 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2026 tem)),
2027 insn);
2028 break;
2032 rtx dest = SET_DEST (x);
2033 rtx src = SET_SRC (x);
2034 #ifdef HAVE_insv
2035 rtx outerdest = dest;
2036 #endif
2038 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2039 || GET_CODE (dest) == SIGN_EXTRACT
2040 || GET_CODE (dest) == ZERO_EXTRACT)
2041 dest = XEXP (dest, 0);
2043 if (GET_CODE (src) == SUBREG)
2044 src = XEXP (src, 0);
2046 /* If VAR does not appear at the top level of the SET
2047 just scan the lower levels of the tree. */
2049 if (src != var && dest != var)
2050 break;
2052 /* We will need to rerecognize this insn. */
2053 INSN_CODE (insn) = -1;
2055 #ifdef HAVE_insv
2056 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2058 /* Since this case will return, ensure we fixup all the
2059 operands here. */
2060 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2061 insn, replacements);
2062 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2063 insn, replacements);
2064 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2065 insn, replacements);
2067 tem = XEXP (outerdest, 0);
2069 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2070 that may appear inside a ZERO_EXTRACT.
2071 This was legitimate when the MEM was a REG. */
2072 if (GET_CODE (tem) == SUBREG
2073 && SUBREG_REG (tem) == var)
2074 tem = fixup_memory_subreg (tem, insn, 0);
2075 else
2076 tem = fixup_stack_1 (tem, insn);
2078 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2079 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2080 && ! mode_dependent_address_p (XEXP (tem, 0))
2081 && ! MEM_VOLATILE_P (tem))
2083 enum machine_mode wanted_mode;
2084 enum machine_mode is_mode = GET_MODE (tem);
2085 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2087 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2088 if (wanted_mode == VOIDmode)
2089 wanted_mode = word_mode;
2091 /* If we have a narrower mode, we can do something. */
2092 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2094 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2095 rtx old_pos = XEXP (outerdest, 2);
2096 rtx newmem;
2098 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2099 offset = (GET_MODE_SIZE (is_mode)
2100 - GET_MODE_SIZE (wanted_mode) - offset);
2102 pos %= GET_MODE_BITSIZE (wanted_mode);
2104 newmem = gen_rtx_MEM (wanted_mode,
2105 plus_constant (XEXP (tem, 0),
2106 offset));
2107 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2108 MEM_COPY_ATTRIBUTES (newmem, tem);
2110 /* Make the change and see if the insn remains valid. */
2111 INSN_CODE (insn) = -1;
2112 XEXP (outerdest, 0) = newmem;
2113 XEXP (outerdest, 2) = GEN_INT (pos);
2115 if (recog_memoized (insn) >= 0)
2116 return;
2118 /* Otherwise, restore old position. XEXP (x, 0) will be
2119 restored later. */
2120 XEXP (outerdest, 2) = old_pos;
2124 /* If we get here, the bit-field store doesn't allow memory
2125 or isn't located at a constant position. Load the value into
2126 a register, do the store, and put it back into memory. */
2128 tem1 = gen_reg_rtx (GET_MODE (tem));
2129 emit_insn_before (gen_move_insn (tem1, tem), insn);
2130 emit_insn_after (gen_move_insn (tem, tem1), insn);
2131 XEXP (outerdest, 0) = tem1;
2132 return;
2134 #endif
2136 /* STRICT_LOW_PART is a no-op on memory references
2137 and it can cause combinations to be unrecognizable,
2138 so eliminate it. */
2140 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2141 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2143 /* A valid insn to copy VAR into or out of a register
2144 must be left alone, to avoid an infinite loop here.
2145 If the reference to VAR is by a subreg, fix that up,
2146 since SUBREG is not valid for a memref.
2147 Also fix up the address of the stack slot.
2149 Note that we must not try to recognize the insn until
2150 after we know that we have valid addresses and no
2151 (subreg (mem ...) ...) constructs, since these interfere
2152 with determining the validity of the insn. */
2154 if ((SET_SRC (x) == var
2155 || (GET_CODE (SET_SRC (x)) == SUBREG
2156 && SUBREG_REG (SET_SRC (x)) == var))
2157 && (GET_CODE (SET_DEST (x)) == REG
2158 || (GET_CODE (SET_DEST (x)) == SUBREG
2159 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2160 && GET_MODE (var) == promoted_mode
2161 && x == single_set (insn))
2163 rtx pat;
2165 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2166 if (replacement->new)
2167 SET_SRC (x) = replacement->new;
2168 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2169 SET_SRC (x) = replacement->new
2170 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2171 else
2172 SET_SRC (x) = replacement->new
2173 = fixup_stack_1 (SET_SRC (x), insn);
2175 if (recog_memoized (insn) >= 0)
2176 return;
2178 /* INSN is not valid, but we know that we want to
2179 copy SET_SRC (x) to SET_DEST (x) in some way. So
2180 we generate the move and see whether it requires more
2181 than one insn. If it does, we emit those insns and
2182 delete INSN. Otherwise, we can just replace the pattern
2183 of INSN; we have already verified above that INSN has
2184 no other function than to do X. */
2186 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2187 if (GET_CODE (pat) == SEQUENCE)
2189 emit_insn_after (pat, insn);
2190 PUT_CODE (insn, NOTE);
2191 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2192 NOTE_SOURCE_FILE (insn) = 0;
2194 else
2195 PATTERN (insn) = pat;
2197 return;
2200 if ((SET_DEST (x) == var
2201 || (GET_CODE (SET_DEST (x)) == SUBREG
2202 && SUBREG_REG (SET_DEST (x)) == var))
2203 && (GET_CODE (SET_SRC (x)) == REG
2204 || (GET_CODE (SET_SRC (x)) == SUBREG
2205 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2206 && GET_MODE (var) == promoted_mode
2207 && x == single_set (insn))
2209 rtx pat;
2211 if (GET_CODE (SET_DEST (x)) == SUBREG)
2212 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2213 else
2214 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2216 if (recog_memoized (insn) >= 0)
2217 return;
2219 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2220 if (GET_CODE (pat) == SEQUENCE)
2222 emit_insn_after (pat, insn);
2223 PUT_CODE (insn, NOTE);
2224 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2225 NOTE_SOURCE_FILE (insn) = 0;
2227 else
2228 PATTERN (insn) = pat;
2230 return;
2233 /* Otherwise, storing into VAR must be handled specially
2234 by storing into a temporary and copying that into VAR
2235 with a new insn after this one. Note that this case
2236 will be used when storing into a promoted scalar since
2237 the insn will now have different modes on the input
2238 and output and hence will be invalid (except for the case
2239 of setting it to a constant, which does not need any
2240 change if it is valid). We generate extra code in that case,
2241 but combine.c will eliminate it. */
2243 if (dest == var)
2245 rtx temp;
2246 rtx fixeddest = SET_DEST (x);
2248 /* STRICT_LOW_PART can be discarded around a MEM. */
2249 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2250 fixeddest = XEXP (fixeddest, 0);
2251 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2252 if (GET_CODE (fixeddest) == SUBREG)
2254 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2255 promoted_mode = GET_MODE (fixeddest);
2257 else
2258 fixeddest = fixup_stack_1 (fixeddest, insn);
2260 temp = gen_reg_rtx (promoted_mode);
2262 emit_insn_after (gen_move_insn (fixeddest,
2263 gen_lowpart (GET_MODE (fixeddest),
2264 temp)),
2265 insn);
2267 SET_DEST (x) = temp;
2271 default:
2272 break;
2275 /* Nothing special about this RTX; fix its operands. */
2277 fmt = GET_RTX_FORMAT (code);
2278 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2280 if (fmt[i] == 'e')
2281 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2282 if (fmt[i] == 'E')
2284 register int j;
2285 for (j = 0; j < XVECLEN (x, i); j++)
2286 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2287 insn, replacements);
2292 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2293 return an rtx (MEM:m1 newaddr) which is equivalent.
2294 If any insns must be emitted to compute NEWADDR, put them before INSN.
2296 UNCRITICAL nonzero means accept paradoxical subregs.
2297 This is used for subregs found inside REG_NOTES. */
2299 static rtx
2300 fixup_memory_subreg (x, insn, uncritical)
2301 rtx x;
2302 rtx insn;
2303 int uncritical;
2305 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2306 rtx addr = XEXP (SUBREG_REG (x), 0);
2307 enum machine_mode mode = GET_MODE (x);
2308 rtx result;
2310 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2311 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2312 && ! uncritical)
2313 abort ();
2315 if (BYTES_BIG_ENDIAN)
2316 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2317 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2318 addr = plus_constant (addr, offset);
2319 if (!flag_force_addr && memory_address_p (mode, addr))
2320 /* Shortcut if no insns need be emitted. */
2321 return change_address (SUBREG_REG (x), mode, addr);
2322 start_sequence ();
2323 result = change_address (SUBREG_REG (x), mode, addr);
2324 emit_insn_before (gen_sequence (), insn);
2325 end_sequence ();
2326 return result;
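/* For example (modes chosen for illustration): (subreg:HI (mem:SI
   (reg:SI 100)) 0) becomes (mem:HI (reg:SI 100)) on a little-endian
   target, while on a big-endian target the offset correction above
   yields (mem:HI (plus:SI (reg:SI 100) (const_int 2))), since the
   low part lives in the high-addressed half of the word.  */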
2329 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2330 Replace subexpressions of X in place.
2331 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2332 Otherwise return X, with its contents possibly altered.
2334 If any insns must be emitted to compute NEWADDR, put them before INSN.
2336 UNCRITICAL is as in fixup_memory_subreg. */
2338 static rtx
2339 walk_fixup_memory_subreg (x, insn, uncritical)
2340 register rtx x;
2341 rtx insn;
2342 int uncritical;
2344 register enum rtx_code code;
2345 register const char *fmt;
2346 register int i;
2348 if (x == 0)
2349 return 0;
2351 code = GET_CODE (x);
2353 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2354 return fixup_memory_subreg (x, insn, uncritical);
2356 /* Nothing special about this RTX; fix its operands. */
2358 fmt = GET_RTX_FORMAT (code);
2359 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2361 if (fmt[i] == 'e')
2362 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2363 if (fmt[i] == 'E')
2365 register int j;
2366 for (j = 0; j < XVECLEN (x, i); j++)
2367 XVECEXP (x, i, j)
2368 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2371 return x;
2374 /* For each memory ref within X, if it refers to a stack slot
2375 with an out of range displacement, put the address in a temp register
2376 (emitting new insns before INSN to load these registers)
2377 and alter the memory ref to use that register.
2378 Replace each such MEM rtx with a copy, to avoid clobberage. */
2380 static rtx
2381 fixup_stack_1 (x, insn)
2382 rtx x;
2383 rtx insn;
2385 register int i;
2386 register RTX_CODE code = GET_CODE (x);
2387 register const char *fmt;
2389 if (code == MEM)
2391 register rtx ad = XEXP (x, 0);
2392 /* If we have address of a stack slot but it's not valid
2393 (displacement is too large), compute the sum in a register. */
2394 if (GET_CODE (ad) == PLUS
2395 && GET_CODE (XEXP (ad, 0)) == REG
2396 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2397 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2398 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2399 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2400 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2401 #endif
2402 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2403 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2404 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2405 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2407 rtx temp, seq;
2408 if (memory_address_p (GET_MODE (x), ad))
2409 return x;
2411 start_sequence ();
2412 temp = copy_to_reg (ad);
2413 seq = gen_sequence ();
2414 end_sequence ();
2415 emit_insn_before (seq, insn);
2416 return change_address (x, VOIDmode, temp);
2418 return x;
2421 fmt = GET_RTX_FORMAT (code);
2422 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2424 if (fmt[i] == 'e')
2425 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2426 if (fmt[i] == 'E')
2428 register int j;
2429 for (j = 0; j < XVECLEN (x, i); j++)
2430 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2433 return x;
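/* For instance, if a stack slot address such as (plus:SI (reg:SI fp)
   (const_int 40000)) exceeds the target's displacement range, the
   code above emits a move of that sum into a new pseudo before INSN
   and rewrites the reference as (mem:SI (reg:SI <new pseudo>)); the
   constant 40000 here is purely illustrative.  */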
2436 /* Optimization: a bit-field instruction whose field
2437 happens to be a byte or halfword in memory
2438 can be changed to a move instruction.
2440 We call here when INSN is an insn to examine or store into a bit-field.
2441 BODY is the SET-rtx to be altered.
2443 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2444 (Currently this is called only from function.c, and EQUIV_MEM
2445 is always 0.) */
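/* For example (positions and modes hypothetical): a store such as
   (set (zero_extract:SI (mem:SI A) (const_int 8) (const_int 8)) R)
   names an aligned byte, so it can become a plain QImode move,
   (set (mem:QI (plus A (const_int 1))) <QImode lowpart of R>), after
   the endianness adjustment performed below.  */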
2447 static void
2448 optimize_bit_field (body, insn, equiv_mem)
2449 rtx body;
2450 rtx insn;
2451 rtx *equiv_mem;
2453 register rtx bitfield;
2454 int destflag;
2455 rtx seq = 0;
2456 enum machine_mode mode;
2458 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2459 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2460 bitfield = SET_DEST (body), destflag = 1;
2461 else
2462 bitfield = SET_SRC (body), destflag = 0;
2464 /* First check that the field being stored has constant size and position
2465 and is in fact a byte or halfword suitably aligned. */
2467 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2468 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2469 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2470 != BLKmode)
2471 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2473 register rtx memref = 0;
2475 /* Now check that the containing word is memory, not a register,
2476 and that it is safe to change the machine mode. */
2478 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2479 memref = XEXP (bitfield, 0);
2480 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2481 && equiv_mem != 0)
2482 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2483 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2484 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2485 memref = SUBREG_REG (XEXP (bitfield, 0));
2486 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2487 && equiv_mem != 0
2488 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2489 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2491 if (memref
2492 && ! mode_dependent_address_p (XEXP (memref, 0))
2493 && ! MEM_VOLATILE_P (memref))
2495 /* Now adjust the address, first for any subreg'ing
2496 that we are now getting rid of,
2497 and then for which byte of the word is wanted. */
2499 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2500 rtx insns;
2502 /* Adjust OFFSET to count bits from low-address byte. */
2503 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2504 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2505 - offset - INTVAL (XEXP (bitfield, 1)));
2507 /* Adjust OFFSET to count bytes from low-address byte. */
2508 offset /= BITS_PER_UNIT;
2509 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2511 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2512 if (BYTES_BIG_ENDIAN)
2513 offset -= (MIN (UNITS_PER_WORD,
2514 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2515 - MIN (UNITS_PER_WORD,
2516 GET_MODE_SIZE (GET_MODE (memref))));
2519 start_sequence ();
2520 memref = change_address (memref, mode,
2521 plus_constant (XEXP (memref, 0), offset));
2522 insns = get_insns ();
2523 end_sequence ();
2524 emit_insns_before (insns, insn);
2526 /* Store this memory reference where
2527 we found the bit field reference. */
2529 if (destflag)
2531 validate_change (insn, &SET_DEST (body), memref, 1);
2532 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2534 rtx src = SET_SRC (body);
2535 while (GET_CODE (src) == SUBREG
2536 && SUBREG_WORD (src) == 0)
2537 src = SUBREG_REG (src);
2538 if (GET_MODE (src) != GET_MODE (memref))
2539 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2540 validate_change (insn, &SET_SRC (body), src, 1);
2542 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2543 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2544 /* This shouldn't happen because anything that didn't have
2545 one of these modes should have got converted explicitly
2546 and then referenced through a subreg.
2547 This is so because the original bit-field was
2548 handled by agg_mode and so its tree structure had
2549 the same mode that memref now has. */
2550 abort ();
2552 else
2554 rtx dest = SET_DEST (body);
2556 while (GET_CODE (dest) == SUBREG
2557 && SUBREG_WORD (dest) == 0
2558 && (GET_MODE_CLASS (GET_MODE (dest))
2559 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2560 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2561 <= UNITS_PER_WORD))
2562 dest = SUBREG_REG (dest);
2564 validate_change (insn, &SET_DEST (body), dest, 1);
2566 if (GET_MODE (dest) == GET_MODE (memref))
2567 validate_change (insn, &SET_SRC (body), memref, 1);
2568 else
2570 /* Convert the mem ref to the destination mode. */
2571 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2573 start_sequence ();
2574 convert_move (newreg, memref,
2575 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2576 seq = get_insns ();
2577 end_sequence ();
2579 validate_change (insn, &SET_SRC (body), newreg, 1);
2583 /* See if we can convert this extraction or insertion into
2584 a simple move insn. We might not be able to do so if this
2585 was, for example, part of a PARALLEL.
2587 If we succeed, write out any needed conversions. If we fail,
2588 it is hard to guess why we failed, so don't do anything
2589 special; just let the optimization be suppressed. */
2591 if (apply_change_group () && seq)
2592 emit_insns_before (seq, insn);
2597 /* These routines are responsible for converting virtual register references
2598 to the actual hard register references once RTL generation is complete.
2600 The following five variables are used for communication between the
2601 routines. They contain the offsets of the virtual registers from their
2602 respective hard registers. */
2604 static int in_arg_offset;
2605 static int var_offset;
2606 static int dynamic_offset;
2607 static int out_arg_offset;
2608 static int cfa_offset;
2610 /* On most machines, the stack pointer register is equivalent to the bottom
2611 of the stack. */
2613 #ifndef STACK_POINTER_OFFSET
2614 #define STACK_POINTER_OFFSET 0
2615 #endif
2617 /* If not defined, pick an appropriate default for the offset of dynamically
2618 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2619 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2621 #ifndef STACK_DYNAMIC_OFFSET
2623 #ifdef ACCUMULATE_OUTGOING_ARGS
2624 /* The bottom of the stack points to the actual arguments. If
2625 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2626 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2627 stack space for register parameters is not pushed by the caller, but
2628 rather part of the fixed stack areas and hence not included in
2629 `current_function_outgoing_args_size'. Nevertheless, we must allow
2630 for it when allocating dynamic stack objects. */
2632 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2633 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2634 (current_function_outgoing_args_size \
2635 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2637 #else
2638 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2639 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2640 #endif
2642 #else
2643 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2644 #endif
2645 #endif
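/* Illustrative values only: on a target that accumulates outgoing
   args, defines REG_PARM_STACK_SPACE but not
   OUTGOING_REG_PARM_STACK_SPACE, with 16 bytes of outgoing args, a
   24-byte register-parameter save area and a zero
   STACK_POINTER_OFFSET, the default above gives
   STACK_DYNAMIC_OFFSET (fndecl) == 16 + 24 == 40.  */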
2647 /* On a few machines, the CFA coincides with the arg pointer. */
2649 #ifndef ARG_POINTER_CFA_OFFSET
2650 #define ARG_POINTER_CFA_OFFSET 0
2651 #endif
2654 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2655 its address taken. DECL is the decl for the object stored in the
2656 register, for later use if we do need to force REG into the stack.
2657 REG is overwritten by the MEM, as in put_reg_into_stack. */
2659 rtx
2660 gen_mem_addressof (reg, decl)
2661 rtx reg;
2662 tree decl;
2664 tree type = TREE_TYPE (decl);
2665 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2666 REGNO (reg), decl);
2667 /* If the original REG was a user-variable, then so is the REG whose
2668 address is being taken. */
2669 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2671 PUT_CODE (reg, MEM);
2672 PUT_MODE (reg, DECL_MODE (decl));
2673 XEXP (reg, 0) = r;
2674 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2675 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2676 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2678 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2679 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2681 return reg;
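/* The in-place rewrite above turns, e.g., (reg:SI 49) for DECL into
   (mem:SI (addressof:SI (reg:SI 105) 49 <decl>)), where 105 is a
   fresh pseudo remembered so the register can still be forced into
   the stack later; all register numbers here are hypothetical.  */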
2684 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2686 #if 0
2687 void
2688 flush_addressof (decl)
2689 tree decl;
2691 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2692 && DECL_RTL (decl) != 0
2693 && GET_CODE (DECL_RTL (decl)) == MEM
2694 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2695 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2696 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2698 #endif
2700 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2702 static void
2703 put_addressof_into_stack (r, ht)
2704 rtx r;
2705 struct hash_table *ht;
2707 tree decl = ADDRESSOF_DECL (r);
2708 rtx reg = XEXP (r, 0);
2710 if (GET_CODE (reg) != REG)
2711 abort ();
2713 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2714 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2715 ADDRESSOF_REGNO (r),
2716 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2719 /* List of replacements made below in purge_addressof_1 when creating
2720 bitfield insertions. */
2721 static rtx purge_bitfield_addressof_replacements;
2723 /* List of replacements made below in purge_addressof_1 for patterns
2724 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2725 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2726 the whole pattern. List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2727 enough in complex cases, e.g. when some field values can be
2728 extracted by using a MEM with a narrower mode. */
2729 static rtx purge_addressof_replacements;
2731 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2732 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2733 the stack. */
2735 static void
2736 purge_addressof_1 (loc, insn, force, store, ht)
2737 rtx *loc;
2738 rtx insn;
2739 int force, store;
2740 struct hash_table *ht;
2742 rtx x;
2743 RTX_CODE code;
2744 int i, j;
2745 const char *fmt;
2747 /* Re-start here to avoid recursion in common cases. */
2748 restart:
2750 x = *loc;
2751 if (x == 0)
2752 return;
2754 code = GET_CODE (x);
2756 /* If we don't return in any of the cases below, we will recurse inside
2757 the RTX, which will normally result in any ADDRESSOF being forced into
2758 memory. */
2759 if (code == SET)
2761 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2762 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2763 return;
2766 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2768 /* We must create a copy of the rtx because it was created by
2769 overwriting a REG rtx which is always shared. */
2770 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2771 rtx insns;
2773 if (validate_change (insn, loc, sub, 0)
2774 || validate_replace_rtx (x, sub, insn))
2775 return;
2777 start_sequence ();
2778 sub = force_operand (sub, NULL_RTX);
2779 if (! validate_change (insn, loc, sub, 0)
2780 && ! validate_replace_rtx (x, sub, insn))
2781 abort ();
2783 insns = gen_sequence ();
2784 end_sequence ();
2785 emit_insn_before (insns, insn);
2786 return;
2789 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2791 rtx sub = XEXP (XEXP (x, 0), 0);
2792 rtx sub2;
2794 if (GET_CODE (sub) == MEM)
2796 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2797 MEM_COPY_ATTRIBUTES (sub2, sub);
2798 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2799 sub = sub2;
2801 else if (GET_CODE (sub) == REG
2802 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2804 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2806 int size_x, size_sub;
2808 if (!insn)
2810 /* When processing REG_NOTES look at the list of
2811 replacements done on the insn to find the register that X
2812 was replaced by. */
2813 rtx tem;
2815 for (tem = purge_bitfield_addressof_replacements;
2816 tem != NULL_RTX;
2817 tem = XEXP (XEXP (tem, 1), 1))
2818 if (rtx_equal_p (x, XEXP (tem, 0)))
2820 *loc = XEXP (XEXP (tem, 1), 0);
2821 return;
2824 /* See comment for purge_addressof_replacements. */
2825 for (tem = purge_addressof_replacements;
2826 tem != NULL_RTX;
2827 tem = XEXP (XEXP (tem, 1), 1))
2828 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2830 rtx z = XEXP (XEXP (tem, 1), 0);
2832 if (GET_MODE (x) == GET_MODE (z)
2833 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2834 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2835 abort ();
2837 /* It can happen that the note may speak of things
2838 in a wider (or just different) mode than the
2839 code did. This is especially true of
2840 REG_RETVAL. */
2842 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2843 z = SUBREG_REG (z);
2845 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2846 && (GET_MODE_SIZE (GET_MODE (x))
2847 > GET_MODE_SIZE (GET_MODE (z))))
2849 /* This can occur as a result of invalid
2850 pointer casts, e.g. float f; ...
2851 *(long long int *)&f.
2852 ??? We could emit a warning here, but
2853 without a line number that wouldn't be
2854 very helpful. */
2855 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2857 else
2858 z = gen_lowpart (GET_MODE (x), z);
2860 *loc = z;
2861 return;
2864 /* There should always be such a replacement. */
2865 abort ();
2868 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2869 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2871 /* Don't even consider working with paradoxical subregs,
2872 or the moral equivalent seen here. */
2873 if (size_x <= size_sub
2874 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2876 /* Do a bitfield insertion to mirror what would happen
2877 in memory. */
2879 rtx val, seq;
2881 if (store)
2883 rtx p = PREV_INSN (insn);
2885 start_sequence ();
2886 val = gen_reg_rtx (GET_MODE (x));
2887 if (! validate_change (insn, loc, val, 0))
2889 /* Discard the current sequence and put the
2890 ADDRESSOF on the stack. */
2891 end_sequence ();
2892 goto give_up;
2894 seq = gen_sequence ();
2895 end_sequence ();
2896 emit_insn_before (seq, insn);
2897 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2898 insn, ht);
2900 start_sequence ();
2901 store_bit_field (sub, size_x, 0, GET_MODE (x),
2902 val, GET_MODE_SIZE (GET_MODE (sub)),
2903 GET_MODE_SIZE (GET_MODE (sub)));
2905 /* Make sure to unshare any shared rtl that store_bit_field
2906 might have created. */
2907 for (p = get_insns(); p; p = NEXT_INSN (p))
2909 reset_used_flags (PATTERN (p));
2910 reset_used_flags (REG_NOTES (p));
2911 reset_used_flags (LOG_LINKS (p));
2913 unshare_all_rtl (get_insns ());
2915 seq = gen_sequence ();
2916 end_sequence ();
2917 p = emit_insn_after (seq, insn);
2918 if (NEXT_INSN (insn))
2919 compute_insns_for_mem (NEXT_INSN (insn),
2920 p ? NEXT_INSN (p) : NULL_RTX,
2921 ht);
2923 else
2925 rtx p = PREV_INSN (insn);
2927 start_sequence ();
2928 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2929 GET_MODE (x), GET_MODE (x),
2930 GET_MODE_SIZE (GET_MODE (sub)),
2931 GET_MODE_SIZE (GET_MODE (sub)));
2933 if (! validate_change (insn, loc, val, 0))
2935 /* Discard the current sequence and put the
2936 ADDRESSOF on the stack. */
2937 end_sequence ();
2938 goto give_up;
2941 seq = gen_sequence ();
2942 end_sequence ();
2943 emit_insn_before (seq, insn);
2944 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2945 insn, ht);
2948 /* Remember the replacement so that the same one can be done
2949 on the REG_NOTES. */
2950 purge_bitfield_addressof_replacements
2951 = gen_rtx_EXPR_LIST (VOIDmode, x,
2952 gen_rtx_EXPR_LIST
2953 (VOIDmode, val,
2954 purge_bitfield_addressof_replacements));
2956 /* We replaced with a reg -- all done. */
2957 return;
2961 else if (validate_change (insn, loc, sub, 0))
2963 /* Remember the replacement so that the same one can be done
2964 on the REG_NOTES. */
2965 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
2967 rtx tem;
2969 for (tem = purge_addressof_replacements;
2970 tem != NULL_RTX;
2971 tem = XEXP (XEXP (tem, 1), 1))
2972 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2974 XEXP (XEXP (tem, 1), 0) = sub;
2975 return;
2977 purge_addressof_replacements
2978 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
2979 gen_rtx_EXPR_LIST (VOIDmode, sub,
2980 purge_addressof_replacements));
2981 return;
2983 goto restart;
2985 give_up:;
2986 /* else give up and put it into the stack */
2989 else if (code == ADDRESSOF)
2991 put_addressof_into_stack (x, ht);
2992 return;
2994 else if (code == SET)
2996 purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2997 purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2998 return;
3001 /* Scan all subexpressions. */
3002 fmt = GET_RTX_FORMAT (code);
3003 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3005 if (*fmt == 'e')
3006 purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3007 else if (*fmt == 'E')
3008 for (j = 0; j < XVECLEN (x, i); j++)
3009 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
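/* Example of the mode-mismatch path above (modes hypothetical): a
   read through (mem:HI (addressof:SI (reg:SI 100) 99)) cannot simply
   name (reg:SI 100), so a 16-bit extract_bit_field from that pseudo
   is emitted instead, mirroring what the memory reference would have
   loaded; stores go through store_bit_field symmetrically.  */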
3013 /* Return a new hash table entry in HT. */
3015 static struct hash_entry *
3016 insns_for_mem_newfunc (he, ht, k)
3017 struct hash_entry *he;
3018 struct hash_table *ht;
3019 hash_table_key k ATTRIBUTE_UNUSED;
3021 struct insns_for_mem_entry *ifmhe;
3022 if (he)
3023 return he;
3025 ifmhe = ((struct insns_for_mem_entry *)
3026 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3027 ifmhe->insns = NULL_RTX;
3029 return &ifmhe->he;
3032 /* Return a hash value for K, a REG. */
3034 static unsigned long
3035 insns_for_mem_hash (k)
3036 hash_table_key k;
3038 /* K is really an RTX. Just use the address as the hash value. */
3039 return (unsigned long) k;
3042 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3044 static boolean
3045 insns_for_mem_comp (k1, k2)
3046 hash_table_key k1;
3047 hash_table_key k2;
3049 return k1 == k2;
3052 struct insns_for_mem_walk_info {
3053 /* The hash table that we are using to record which INSNs use which
3054 MEMs. */
3055 struct hash_table *ht;
3057 /* The INSN we are currently processing. */
3058 rtx insn;
3060 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3061 to find the insns that use the REGs in the ADDRESSOFs. */
3062 int pass;
3065 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3066 that might be used in an ADDRESSOF expression, record this INSN in
3067 the hash table given by DATA (which is really a pointer to an
3068 insns_for_mem_walk_info structure). */
3070 static int
3071 insns_for_mem_walk (r, data)
3072 rtx *r;
3073 void *data;
3075 struct insns_for_mem_walk_info *ifmwi
3076 = (struct insns_for_mem_walk_info *) data;
3078 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3079 && GET_CODE (XEXP (*r, 0)) == REG)
3080 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3081 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3083 /* Look up this REG in the hash table; pass 0 created the entries, so none is created here. */
3084 struct insns_for_mem_entry *ifme
3085 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3087 /*create=*/0,
3088 /*copy=*/0);
3090 /* If we have not already recorded this INSN, do so now. Since
3091 we process the INSNs in order, we know that if we have
3092 recorded it, it must be at the front of the list. */
3093 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3095 /* We do the allocation on the same obstack as is used for
3096 the hash table since this memory will not be used once
3097 the hash table is deallocated. */
3098 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3099 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3100 ifme->insns);
3101 pop_obstacks ();
3105 return 0;
3108 /* Walk the INSNS until we reach LAST_INSN, recording which INSNs use
3109 which REGs in HT. */
3111 static void
3112 compute_insns_for_mem (insns, last_insn, ht)
3113 rtx insns;
3114 rtx last_insn;
3115 struct hash_table *ht;
3117 rtx insn;
3118 struct insns_for_mem_walk_info ifmwi;
3119 ifmwi.ht = ht;
3121 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3122 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3123 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3125 ifmwi.insn = insn;
3126 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
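/* For instance, given I1: (set (reg 65) (addressof (reg 64) ...))
   followed by I2 using (reg 64), pass 0 creates the table entry
   keyed on (reg 64) and pass 1 records both I1 and I2 in its insn
   list, so purge_addressof_1 can later restrict its fixups to just
   those insns; the insn and register numbers are made up.  */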
3130 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3131 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3132 stack. */
3134 void
3135 purge_addressof (insns)
3136 rtx insns;
3138 rtx insn;
3139 struct hash_table ht;
3141 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3142 requires a fixup pass over the instruction stream to correct
3143 INSNs that depended on the REG being a REG, and not a MEM. But,
3144 these fixup passes are slow. Furthermore, most MEMs are not
3145 mentioned in very many instructions. So, we speed up the process
3146 by pre-calculating which REGs occur in which INSNs; that allows
3147 us to perform the fixup passes much more quickly. */
3148 hash_table_init (&ht,
3149 insns_for_mem_newfunc,
3150 insns_for_mem_hash,
3151 insns_for_mem_comp);
3152 compute_insns_for_mem (insns, NULL_RTX, &ht);
3154 for (insn = insns; insn; insn = NEXT_INSN (insn))
3155 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3156 || GET_CODE (insn) == CALL_INSN)
3158 purge_addressof_1 (&PATTERN (insn), insn,
3159 asm_noperands (PATTERN (insn)) > 0, 0, &ht);
3160 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht);
3163 /* Clean up. */
3164 hash_table_free (&ht);
3165 purge_bitfield_addressof_replacements = 0;
3166 purge_addressof_replacements = 0;
3169 /* Pass through the INSNS of function FNDECL and convert virtual register
3170 references to hard register references. */
3172 void
3173 instantiate_virtual_regs (fndecl, insns)
3174 tree fndecl;
3175 rtx insns;
3177 rtx insn;
3178 int i;
3180 /* Compute the offsets to use for this function. */
3181 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3182 var_offset = STARTING_FRAME_OFFSET;
3183 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3184 out_arg_offset = STACK_POINTER_OFFSET;
3185 cfa_offset = ARG_POINTER_CFA_OFFSET;
3187 /* Scan all variables and parameters of this function. For each that is
3188 in memory, instantiate all virtual registers if the result is a valid
3189 address. If not, we do it later. That will handle most uses of virtual
3190 regs on many machines. */
3191 instantiate_decls (fndecl, 1);
3193 /* Initialize recognition, indicating that volatile is OK. */
3194 init_recog ();
3196 /* Scan through all the insns, instantiating every virtual register still
3197 present. */
3198 for (insn = insns; insn; insn = NEXT_INSN (insn))
3199 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3200 || GET_CODE (insn) == CALL_INSN)
3202 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3203 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3206 /* Instantiate the stack slots for the parm registers, for later use in
3207 addressof elimination. */
3208 for (i = 0; i < max_parm_reg; ++i)
3209 if (parm_reg_stack_loc[i])
3210 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3212 /* Now instantiate the remaining register equivalences for debugging info.
3213 These will not be valid addresses. */
3214 instantiate_decls (fndecl, 0);
3216 /* Indicate that, from now on, assign_stack_local should use
3217 frame_pointer_rtx. */
3218 virtuals_instantiated = 1;
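/* As a concrete (hypothetical) picture of the instantiation done
   here: with var_offset == STARTING_FRAME_OFFSET == 0, an address
   (plus:SI (reg virtual-stack-vars) (const_int 8)) in the stream
   becomes (plus:SI (reg fp) (const_int 8)), while references through
   virtual-outgoing-args are rebased onto the stack pointer using
   out_arg_offset, i.e. STACK_POINTER_OFFSET.  */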
3221 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3222 all virtual registers in their DECL_RTL's.
3224 If VALID_ONLY, do this only if the resulting address is still valid.
3225 Otherwise, always do it. */
3227 static void
3228 instantiate_decls (fndecl, valid_only)
3229 tree fndecl;
3230 int valid_only;
3232 tree decl;
3234 if (DECL_SAVED_INSNS (fndecl))
3235 /* When compiling an inline function, the obstack used for
3236 rtl allocation is the maybepermanent_obstack. Calling
3237 `resume_temporary_allocation' switches us back to that
3238 obstack while we process this function's parameters. */
3239 resume_temporary_allocation ();
3241 /* Process all parameters of the function. */
3242 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3244 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3246 instantiate_decl (DECL_RTL (decl), size, valid_only);
3248 /* If the parameter was promoted, then the incoming RTL mode may be
3249 larger than the declared type size. We must use the larger of
3250 the two sizes. */
3251 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3252 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3255 /* Now process all variables defined in the function or its subblocks. */
3256 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3258 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3260 /* Save all rtl allocated for this function by raising the
3261 high-water mark on the maybepermanent_obstack. */
3262 preserve_data ();
3263 /* All further rtl allocation is now done in the current_obstack. */
3264 rtl_in_current_obstack ();
3268 /* Subroutine of instantiate_decls: Process all decls in the given
3269 BLOCK node and all its subblocks. */
3271 static void
3272 instantiate_decls_1 (let, valid_only)
3273 tree let;
3274 int valid_only;
3276 tree t;
3278 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3279 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3280 valid_only);
3282 /* Process all subblocks. */
3283 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3284 instantiate_decls_1 (t, valid_only);
3287 /* Subroutine of the preceding procedures: Given RTL representing a
3288 decl and the size of the object, do any instantiation required.
3290 If VALID_ONLY is non-zero, it means that the RTL should only be
3291 changed if the new address is valid. */
3293 static void
3294 instantiate_decl (x, size, valid_only)
3295 rtx x;
3296 int size;
3297 int valid_only;
3299 enum machine_mode mode;
3300 rtx addr;
3302 /* If this is not a MEM, no need to do anything. Similarly if the
3303 address is a constant or a register that is not a virtual register. */
3305 if (x == 0 || GET_CODE (x) != MEM)
3306 return;
3308 addr = XEXP (x, 0);
3309 if (CONSTANT_P (addr)
3310 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3311 || (GET_CODE (addr) == REG
3312 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3313 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3314 return;
3316 /* If we should only do this if the address is valid, copy the address.
3317 We need to do this so we can undo any changes that might make the
3318 address invalid. This copy is unfortunate, but probably can't be
3319 avoided. */
3321 if (valid_only)
3322 addr = copy_rtx (addr);
3324 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3326 if (valid_only)
3328 /* Now verify that the resulting address is valid for every integer or
3329 floating-point mode up to and including SIZE bytes long. We do this
3330 since the object might be accessed in any mode and frame addresses
3331 are shared. */
3333 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3334 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3335 mode = GET_MODE_WIDER_MODE (mode))
3336 if (! memory_address_p (mode, addr))
3337 return;
3339 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3340 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3341 mode = GET_MODE_WIDER_MODE (mode))
3342 if (! memory_address_p (mode, addr))
3343 return;
3346 /* Put back the address now that we have updated it and we either know
3347 it is valid or we don't care whether it is valid. */
3349 XEXP (x, 0) = addr;
3352 /* Given a pointer to a piece of rtx and an optional pointer to the
3353 containing object, instantiate any virtual registers present in it.
3355 If EXTRA_INSNS, we always do the replacement and generate
3356 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
3357 is not valid.
3359 Return 1 if we either had nothing to do or if we were able to do the
3360 needed replacement. Return 0 otherwise; we only return zero if
3361 EXTRA_INSNS is zero.
3363 We first try some simple transformations to avoid the creation of extra
3364 pseudos. */
3366 static int
3367 instantiate_virtual_regs_1 (loc, object, extra_insns)
3368 rtx *loc;
3369 rtx object;
3370 int extra_insns;
3372 rtx x;
3373 RTX_CODE code;
3374 rtx new = 0;
3375 HOST_WIDE_INT offset = 0;
3376 rtx temp;
3377 rtx seq;
3378 int i, j;
3379 const char *fmt;
3381 /* Re-start here to avoid recursion in common cases. */
3382 restart:
3384 x = *loc;
3385 if (x == 0)
3386 return 1;
3388 code = GET_CODE (x);
3390 /* Check for some special cases. */
3391 switch (code)
3393 case CONST_INT:
3394 case CONST_DOUBLE:
3395 case CONST:
3396 case SYMBOL_REF:
3397 case CODE_LABEL:
3398 case PC:
3399 case CC0:
3400 case ASM_INPUT:
3401 case ADDR_VEC:
3402 case ADDR_DIFF_VEC:
3403 case RETURN:
3404 return 1;
3406 case SET:
3407 /* We are allowed to set the virtual registers. This means that
3408 the actual register should receive the source minus the
3409 appropriate offset. This is used, for example, in the handling
3410 of non-local gotos. */
3411 if (SET_DEST (x) == virtual_incoming_args_rtx)
3412 new = arg_pointer_rtx, offset = - in_arg_offset;
3413 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3414 new = frame_pointer_rtx, offset = - var_offset;
3415 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3416 new = stack_pointer_rtx, offset = - dynamic_offset;
3417 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3418 new = stack_pointer_rtx, offset = - out_arg_offset;
3419 else if (SET_DEST (x) == virtual_cfa_rtx)
3420 new = arg_pointer_rtx, offset = - cfa_offset;
3422 if (new)
3424 /* The only valid sources here are PLUS or REG. Just do
3425 the simplest possible thing to handle them. */
3426 if (GET_CODE (SET_SRC (x)) != REG
3427 && GET_CODE (SET_SRC (x)) != PLUS)
3428 abort ();
3430 start_sequence ();
3431 if (GET_CODE (SET_SRC (x)) != REG)
3432 temp = force_operand (SET_SRC (x), NULL_RTX);
3433 else
3434 temp = SET_SRC (x);
3435 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3436 seq = get_insns ();
3437 end_sequence ();
3439 emit_insns_before (seq, object);
3440 SET_DEST (x) = new;
3442 if (! validate_change (object, &SET_SRC (x), temp, 0)
3443 || ! extra_insns)
3444 abort ();
3446 return 1;
3449 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3450 loc = &SET_SRC (x);
3451 goto restart;
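/* For example (names as in the code above): the nonlocal-goto
   support can emit (set (reg virtual-stack-dynamic) (reg R)).  The
   rewrite above turns this into a set of the real stack pointer from
   R plus -dynamic_offset, i.e. the virtual register's bias is
   subtracted from the source rather than added to it.  */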
3453 case PLUS:
3454 /* Handle special case of virtual register plus constant. */
3455 if (CONSTANT_P (XEXP (x, 1)))
3457 rtx old, new_offset;
3459 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3460 if (GET_CODE (XEXP (x, 0)) == PLUS)
3462 rtx inner = XEXP (XEXP (x, 0), 0);
3464 if (inner == virtual_incoming_args_rtx)
3465 new = arg_pointer_rtx, offset = in_arg_offset;
3466 else if (inner == virtual_stack_vars_rtx)
3467 new = frame_pointer_rtx, offset = var_offset;
3468 else if (inner == virtual_stack_dynamic_rtx)
3469 new = stack_pointer_rtx, offset = dynamic_offset;
3470 else if (inner == virtual_outgoing_args_rtx)
3471 new = stack_pointer_rtx, offset = out_arg_offset;
3472 else if (inner == virtual_cfa_rtx)
3473 new = arg_pointer_rtx, offset = cfa_offset;
3474 else
3476 loc = &XEXP (x, 0);
3477 goto restart;
3480 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3481 extra_insns);
3482 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3485 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3486 new = arg_pointer_rtx, offset = in_arg_offset;
3487 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3488 new = frame_pointer_rtx, offset = var_offset;
3489 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3490 new = stack_pointer_rtx, offset = dynamic_offset;
3491 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3492 new = stack_pointer_rtx, offset = out_arg_offset;
3493 else if (XEXP (x, 0) == virtual_cfa_rtx)
3494 new = arg_pointer_rtx, offset = cfa_offset;
3495 else
3497 /* We know the second operand is a constant. Unless the
3498 first operand is a REG (which has already been checked),
3499 it needs to be checked. */
3500 if (GET_CODE (XEXP (x, 0)) != REG)
3502 loc = &XEXP (x, 0);
3503 goto restart;
3505 return 1;
3508 new_offset = plus_constant (XEXP (x, 1), offset);
3510 /* If the new constant is zero, try to replace the sum with just
3511 the register. */
3512 if (new_offset == const0_rtx
3513 && validate_change (object, loc, new, 0))
3514 return 1;
3516 /* Next try to replace the register and new offset.
3517 There are two changes to validate here and we can't assume that
3518 when the old offset equals the new one, just changing the register
3519 will yield a valid insn. In the interests of a little efficiency,
3520 however, we only call validate change once (we don't queue up the
3521 changes and then call apply_change_group). */
3523 old = XEXP (x, 0);
3524 if (offset == 0
3525 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3526 : (XEXP (x, 0) = new,
3527 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3529 if (! extra_insns)
3531 XEXP (x, 0) = old;
3532 return 0;
3535 /* Otherwise copy the new constant into a register and replace
3536 the constant with that register. */
3537 temp = gen_reg_rtx (Pmode);
3538 XEXP (x, 0) = new;
3539 if (validate_change (object, &XEXP (x, 1), temp, 0))
3540 emit_insn_before (gen_move_insn (temp, new_offset), object);
3541 else
3543 /* If that didn't work, replace this expression with a
3544 register containing the sum. */
3546 XEXP (x, 0) = old;
3547 new = gen_rtx_PLUS (Pmode, new, new_offset);
3549 start_sequence ();
3550 temp = force_operand (new, NULL_RTX);
3551 seq = get_insns ();
3552 end_sequence ();
3554 emit_insns_before (seq, object);
3555 if (! validate_change (object, loc, temp, 0)
3556 && ! validate_replace_rtx (x, temp, object))
3557 abort ();
3561 return 1;
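/* Worked instance of the PLUS case (offsets hypothetical): with
   var_offset == 16, (plus:SI (reg virtual-stack-vars) (const_int -4))
   becomes (plus:SI (reg fp) (const_int 12)); had the operand been
   (const_int -16), new_offset would be const0_rtx and the shortcut
   above replaces the whole PLUS with the bare frame pointer.  */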
3564 /* Fall through to generic two-operand expression case. */
3565 case EXPR_LIST:
3566 case CALL:
3567 case COMPARE:
3568 case MINUS:
3569 case MULT:
3570 case DIV: case UDIV:
3571 case MOD: case UMOD:
3572 case AND: case IOR: case XOR:
3573 case ROTATERT: case ROTATE:
3574 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3575 case NE: case EQ:
3576 case GE: case GT: case GEU: case GTU:
3577 case LE: case LT: case LEU: case LTU:
3578 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3579 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3580 loc = &XEXP (x, 0);
3581 goto restart;
3583 case MEM:
3584 /* Most cases of MEM that convert to valid addresses have already been
3585 handled by our scan of decls. The only special handling we
3586 need here is to make a copy of the rtx to ensure it isn't being
3587 shared if we have to change it to a pseudo.
3589 If the rtx is a simple reference to an address via a virtual register,
3590 it can potentially be shared. In such cases, first try to make it
3591 a valid address, which can also be shared. Otherwise, copy it and
3592 proceed normally.
3594 First check for common cases that need no processing. These are
3595 usually due to instantiation already being done on a previous instance
3596 of a shared rtx. */
3598 temp = XEXP (x, 0);
3599 if (CONSTANT_ADDRESS_P (temp)
3600 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3601 || temp == arg_pointer_rtx
3602 #endif
3603 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3604 || temp == hard_frame_pointer_rtx
3605 #endif
3606 || temp == frame_pointer_rtx)
3607 return 1;
3609 if (GET_CODE (temp) == PLUS
3610 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3611 && (XEXP (temp, 0) == frame_pointer_rtx
3612 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3613 || XEXP (temp, 0) == hard_frame_pointer_rtx
3614 #endif
3615 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3616 || XEXP (temp, 0) == arg_pointer_rtx
3617 #endif
3619 return 1;
3621 if (temp == virtual_stack_vars_rtx
3622 || temp == virtual_incoming_args_rtx
3623 || (GET_CODE (temp) == PLUS
3624 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3625 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3626 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3628 /* This MEM may be shared. If the substitution can be done without
3629 the need to generate new pseudos, we want to do it in place
3630 so all copies of the shared rtx benefit. The call below will
3631 only make substitutions if the resulting address is still
3632 valid.
3634 Note that we cannot pass X as the object in the recursive call
3635 since the insn being processed may not allow all valid
3636 addresses. However, if we were not passed an object, we can
3637 only modify X without copying it if X will have a valid
3638 address.
3640 ??? Also note that this can still lose if OBJECT is an insn that
3641 has fewer restrictions on an address than some other insn.
3642 In that case, we will modify the shared address. This case
3643 doesn't seem very likely, though. One case where this could
3644 happen is in the case of a USE or CLOBBER reference, but we
3645 take care of that below. */
3647 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3648 object ? object : x, 0))
3649 return 1;
3651 /* Otherwise make a copy and process that copy. We copy the entire
3652 RTL expression since it might be a PLUS which could also be
3653 shared. */
3654 *loc = x = copy_rtx (x);
3657 /* Fall through to generic unary operation case. */
3658 case SUBREG:
3659 case STRICT_LOW_PART:
3660 case NEG: case NOT:
3661 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3662 case SIGN_EXTEND: case ZERO_EXTEND:
3663 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3664 case FLOAT: case FIX:
3665 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3666 case ABS:
3667 case SQRT:
3668 case FFS:
3669 /* These cases either have just one operand or we know that we need not
3670 check the rest of the operands. */
3671 loc = &XEXP (x, 0);
3672 goto restart;
3674 case USE:
3675 case CLOBBER:
3676 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3677 go ahead and make the invalid one, but do it to a copy. For a REG,
3678 just make the recursive call, since there's no chance of a problem. */
3680 if ((GET_CODE (XEXP (x, 0)) == MEM
3681 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3682 0))
3683 || (GET_CODE (XEXP (x, 0)) == REG
3684 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3685 return 1;
3687 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3688 loc = &XEXP (x, 0);
3689 goto restart;
3691 case REG:
3692 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3693 in front of this insn and substitute the temporary. */
3694 if (x == virtual_incoming_args_rtx)
3695 new = arg_pointer_rtx, offset = in_arg_offset;
3696 else if (x == virtual_stack_vars_rtx)
3697 new = frame_pointer_rtx, offset = var_offset;
3698 else if (x == virtual_stack_dynamic_rtx)
3699 new = stack_pointer_rtx, offset = dynamic_offset;
3700 else if (x == virtual_outgoing_args_rtx)
3701 new = stack_pointer_rtx, offset = out_arg_offset;
3702 else if (x == virtual_cfa_rtx)
3703 new = arg_pointer_rtx, offset = cfa_offset;
3705 if (new)
3707 temp = plus_constant (new, offset);
3708 if (!validate_change (object, loc, temp, 0))
3710 if (! extra_insns)
3711 return 0;
3713 start_sequence ();
3714 temp = force_operand (temp, NULL_RTX);
3715 seq = get_insns ();
3716 end_sequence ();
3718 emit_insns_before (seq, object);
3719 if (! validate_change (object, loc, temp, 0)
3720 && ! validate_replace_rtx (x, temp, object))
3721 abort ();
3725 return 1;
3727 case ADDRESSOF:
3728 if (GET_CODE (XEXP (x, 0)) == REG)
3729 return 1;
3731 else if (GET_CODE (XEXP (x, 0)) == MEM)
3733 /* If we have a (addressof (mem ..)), do any instantiation inside
3734 since we know we'll be making the inside valid when we finally
3735 remove the ADDRESSOF. */
3736 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3737 return 1;
3739 break;
3741 default:
3742 break;
3745 /* Scan all subexpressions. */
3746 fmt = GET_RTX_FORMAT (code);
3747 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3748 if (*fmt == 'e')
3750 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3751 return 0;
3753 else if (*fmt == 'E')
3754 for (j = 0; j < XVECLEN (x, i); j++)
3755 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3756 extra_insns))
3757 return 0;
3759 return 1;
3762 /* Optimization: assuming this function does not receive nonlocal gotos,
3763 delete the handlers for such, as well as the insns to establish
3764 and disestablish them. */
3766 static void
3767 delete_handlers ()
3769 rtx insn;
3770 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3772 /* Delete the handler by turning off the flag that would
3773 prevent jump_optimize from deleting it.
3774 Also permit deletion of the nonlocal labels themselves
3775 if nothing local refers to them. */
3776 if (GET_CODE (insn) == CODE_LABEL)
3778 tree t, last_t;
3780 LABEL_PRESERVE_P (insn) = 0;
3782 /* Remove it from the nonlocal_label list, to avoid confusing
3783 flow. */
3784 for (t = nonlocal_labels, last_t = 0; t;
3785 last_t = t, t = TREE_CHAIN (t))
3786 if (DECL_RTL (TREE_VALUE (t)) == insn)
3787 break;
3788 if (t)
3790 if (! last_t)
3791 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3792 else
3793 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3796 if (GET_CODE (insn) == INSN)
3798 int can_delete = 0;
3799 rtx t;
3800 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3801 if (reg_mentioned_p (t, PATTERN (insn)))
3803 can_delete = 1;
3804 break;
3806 if (can_delete
3807 || (nonlocal_goto_stack_level != 0
3808 && reg_mentioned_p (nonlocal_goto_stack_level,
3809 PATTERN (insn))))
3810 delete_insn (insn);
3815 /* Output a USE for any register use in RTL.
3816 This is used with -noreg to mark the extent of the lifespan
3817 of any registers used in a user-visible variable's DECL_RTL. */
3819 void
3820 use_variable (rtl)
3821 rtx rtl;
3823 if (GET_CODE (rtl) == REG)
3824 /* This is a register variable. */
3825 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3826 else if (GET_CODE (rtl) == MEM
3827 && GET_CODE (XEXP (rtl, 0)) == REG
3828 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3829 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3830 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3831 /* This is a variable-sized structure. */
3832 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3835 /* Like use_variable except that it outputs the USEs after INSN
3836 instead of at the end of the insn-chain. */
3838 void
3839 use_variable_after (rtl, insn)
3840 rtx rtl, insn;
3842 if (GET_CODE (rtl) == REG)
3843 /* This is a register variable. */
3844 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3845 else if (GET_CODE (rtl) == MEM
3846 && GET_CODE (XEXP (rtl, 0)) == REG
3847 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3848 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3849 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3850 /* This is a variable-sized structure. */
3851 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3854 int
3855 max_parm_reg_num ()
3857 return max_parm_reg;
3860 /* Return the first insn following those generated by `assign_parms'. */
3862 rtx
3863 get_first_nonparm_insn ()
3865 if (last_parm_insn)
3866 return NEXT_INSN (last_parm_insn);
3867 return get_insns ();
3870 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3871 Crash if there is none. */
3873 rtx
3874 get_first_block_beg ()
3876 register rtx searcher;
3877 register rtx insn = get_first_nonparm_insn ();
3879 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3880 if (GET_CODE (searcher) == NOTE
3881 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3882 return searcher;
3884 abort (); /* Invalid call to this function. (See comments above.) */
3885 return NULL_RTX;
3888 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3889 This means a type for which function calls must pass an address to the
3890 function or get an address back from the function.
3891 EXP may be a type node or an expression (whose type is tested). */
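/* For example, on most targets a return type such as
struct S { char buf[64]; }
satisfies RETURN_IN_MEMORY, so aggregate_value_p returns 1 and callers
must pass the address of a place to store the value. */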
3893 int
3894 aggregate_value_p (exp)
3895 tree exp;
3897 int i, regno, nregs;
3898 rtx reg;
3899 tree type;
3900 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3901 type = exp;
3902 else
3903 type = TREE_TYPE (exp);
3905 if (RETURN_IN_MEMORY (type))
3906 return 1;
3907 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3908 and thus can't be returned in registers. */
3909 if (TREE_ADDRESSABLE (type))
3910 return 1;
3911 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3912 return 1;
3913 /* Make sure we have suitable call-clobbered regs to return
3914 the value in; if not, we must return it in memory. */
3915 reg = hard_function_value (type, 0);
3917 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3918 it is OK. */
3919 if (GET_CODE (reg) != REG)
3920 return 0;
3922 regno = REGNO (reg);
3923 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3924 for (i = 0; i < nregs; i++)
3925 if (! call_used_regs[regno + i])
3926 return 1;
3927 return 0;
3930 /* Assign RTL expressions to the function's parameters.
3931 This may involve copying them into registers and using
3932 those registers as the RTL for them. */
3934 void
3935 assign_parms (fndecl)
3936 tree fndecl;
3938 register tree parm;
3939 register rtx entry_parm = 0;
3940 register rtx stack_parm = 0;
3941 CUMULATIVE_ARGS args_so_far;
3942 enum machine_mode promoted_mode, passed_mode;
3943 enum machine_mode nominal_mode, promoted_nominal_mode;
3944 int unsignedp;
3945 /* Total space needed so far for args on the stack,
3946 given as a constant and a tree-expression. */
3947 struct args_size stack_args_size;
3948 tree fntype = TREE_TYPE (fndecl);
3949 tree fnargs = DECL_ARGUMENTS (fndecl);
3950 /* This is used for the arg pointer when referring to stack args. */
3951 rtx internal_arg_pointer;
3952 /* This is a dummy PARM_DECL that we use for the function result if
3953 the function returns a structure. */
3954 tree function_result_decl = 0;
3955 #ifdef SETUP_INCOMING_VARARGS
3956 int varargs_setup = 0;
3957 #endif
3958 rtx conversion_insns = 0;
3960 /* Nonzero if the last arg is named `__builtin_va_alist',
3961 which is used on some machines for old-fashioned non-ANSI varargs.h;
3962 this should be stuck onto the stack as if it had arrived there. */
3963 int hide_last_arg
3964 = (current_function_varargs
3965 && fnargs
3966 && (parm = tree_last (fnargs)) != 0
3967 && DECL_NAME (parm)
3968 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3969 "__builtin_va_alist")));
3971 /* Nonzero if function takes extra anonymous args.
3972 This means the last named arg must be on the stack
3973 right before the anonymous ones. */
3974 int stdarg
3975 = (TYPE_ARG_TYPES (fntype) != 0
3976 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3977 != void_type_node));
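/* E.g., for `int f (int n, ...)' the argument type list does not end in
void_type_node, so STDARG is nonzero; for `int f (int n)' the list is
terminated by void_type_node and STDARG is zero. */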
3979 current_function_stdarg = stdarg;
3981 /* If the reg that the virtual arg pointer will be translated into is
3982 not a fixed reg or is the stack pointer, make a copy of the virtual
3983 arg pointer, and address parms via the copy. The frame pointer is
3984 considered fixed even though it is not marked as such.
3986 The second time through, simply use ap to avoid generating rtx. */
3988 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3989 || ! (fixed_regs[ARG_POINTER_REGNUM]
3990 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3991 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3992 else
3993 internal_arg_pointer = virtual_incoming_args_rtx;
3994 current_function_internal_arg_pointer = internal_arg_pointer;
3996 stack_args_size.constant = 0;
3997 stack_args_size.var = 0;
3999 /* If struct value address is treated as the first argument, make it so. */
4000 if (aggregate_value_p (DECL_RESULT (fndecl))
4001 && ! current_function_returns_pcc_struct
4002 && struct_value_incoming_rtx == 0)
4004 tree type = build_pointer_type (TREE_TYPE (fntype));
4006 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4008 DECL_ARG_TYPE (function_result_decl) = type;
4009 TREE_CHAIN (function_result_decl) = fnargs;
4010 fnargs = function_result_decl;
4013 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4014 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4016 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4017 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4018 #else
4019 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4020 #endif
4022 /* We haven't yet found an argument that we must push and pretend the
4023 caller did. */
4024 current_function_pretend_args_size = 0;
4026 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4028 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4029 struct args_size stack_offset;
4030 struct args_size arg_size;
4031 int passed_pointer = 0;
4032 int did_conversion = 0;
4033 tree passed_type = DECL_ARG_TYPE (parm);
4034 tree nominal_type = TREE_TYPE (parm);
4035 int pretend_named;
4037 /* Set LAST_NAMED if this is the last named arg before some
4038 anonymous args. */
4039 int last_named = ((TREE_CHAIN (parm) == 0
4040 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4041 && (stdarg || current_function_varargs));
4042 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4043 most machines, if this is a varargs/stdarg function, then we treat
4044 the last named arg as if it were anonymous too. */
4045 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4047 if (TREE_TYPE (parm) == error_mark_node
4048 /* This can happen after weird syntax errors
4049 or if an enum type is defined among the parms. */
4050 || TREE_CODE (parm) != PARM_DECL
4051 || passed_type == NULL)
4053 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4054 = gen_rtx_MEM (BLKmode, const0_rtx);
4055 TREE_USED (parm) = 1;
4056 continue;
4059 /* For a varargs.h function, save info about regs and stack space
4060 used by the individual args, not including the va_alist arg. */
4061 if (hide_last_arg && last_named)
4062 current_function_args_info = args_so_far;
4064 /* Find mode of arg as it is passed, and mode of arg
4065 as it should be during execution of this function. */
4066 passed_mode = TYPE_MODE (passed_type);
4067 nominal_mode = TYPE_MODE (nominal_type);
4069 /* If the parm's mode is VOID, its value doesn't matter,
4070 so avoid the usual things like emit_move_insn that could crash. */
4071 if (nominal_mode == VOIDmode)
4073 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4074 continue;
4077 /* If the parm is to be passed as a transparent union, use the
4078 type of the first field for the tests below. We have already
4079 verified that the modes are the same. */
4080 if (DECL_TRANSPARENT_UNION (parm)
4081 || TYPE_TRANSPARENT_UNION (passed_type))
4082 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4084 /* See if this arg was passed by invisible reference. It is if
4085 it is an object whose size depends on the contents of the
4086 object itself or if the machine requires these objects be passed
4087 that way. */
4089 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4090 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4091 || TREE_ADDRESSABLE (passed_type)
4092 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4093 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4094 passed_type, named_arg)
4095 #endif
4098 passed_type = nominal_type = build_pointer_type (passed_type);
4099 passed_pointer = 1;
4100 passed_mode = nominal_mode = Pmode;
4103 promoted_mode = passed_mode;
4105 #ifdef PROMOTE_FUNCTION_ARGS
4106 /* Compute the mode to which the arg is actually extended. */
4107 unsignedp = TREE_UNSIGNED (passed_type);
4108 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4109 #endif
4111 /* Let machine desc say which reg (if any) the parm arrives in.
4112 0 means it arrives on the stack. */
4113 #ifdef FUNCTION_INCOMING_ARG
4114 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4115 passed_type, named_arg);
4116 #else
4117 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4118 passed_type, named_arg);
4119 #endif
4121 if (entry_parm == 0)
4122 promoted_mode = passed_mode;
4124 #ifdef SETUP_INCOMING_VARARGS
4125 /* If this is the last named parameter, do any required setup for
4126 varargs or stdargs. We need to know about the case of this being an
4127 addressable type, in which case we skip the registers it
4128 would have arrived in.
4130 For stdargs, LAST_NAMED will be set for two parameters, the one that
4131 is actually the last named, and the dummy parameter. We only
4132 want to do this action once.
4134 Also, indicate when RTL generation is to be suppressed. */
4135 if (last_named && !varargs_setup)
4137 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4138 current_function_pretend_args_size, 0);
4139 varargs_setup = 1;
4141 #endif
4143 /* Determine parm's home in the stack,
4144 in case it arrives in the stack or we should pretend it did.
4146 Compute the stack position and rtx where the argument arrives
4147 and its size.
4149 There is one complexity here: If this was a parameter that would
4150 have been passed in registers, but wasn't only because it is
4151 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4152 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4153 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4154 0 as it was the previous time. */
4156 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4157 locate_and_pad_parm (promoted_mode, passed_type,
4158 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4159 1,
4160 #else
4161 #ifdef FUNCTION_INCOMING_ARG
4162 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4163 passed_type,
4164 pretend_named) != 0,
4165 #else
4166 FUNCTION_ARG (args_so_far, promoted_mode,
4167 passed_type,
4168 pretend_named) != 0,
4169 #endif
4170 #endif
4171 fndecl, &stack_args_size, &stack_offset, &arg_size);
4174 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4176 if (offset_rtx == const0_rtx)
4177 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4178 else
4179 stack_parm = gen_rtx_MEM (promoted_mode,
4180 gen_rtx_PLUS (Pmode,
4181 internal_arg_pointer,
4182 offset_rtx));
4184 /* If this is a memory ref that contains aggregate components,
4185 mark it as such for cse and loop optimize. Likewise if it
4186 is readonly. */
4187 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4188 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4189 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4192 /* If this parameter was passed both in registers and in the stack,
4193 use the copy on the stack. */
4194 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4195 entry_parm = 0;
4197 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4198 /* If this parm was passed part in regs and part in memory,
4199 pretend it arrived entirely in memory
4200 by pushing the register-part onto the stack.
4202 In the special case of a DImode or DFmode that is split,
4203 we could put it together in a pseudoreg directly,
4204 but for now that's not worth bothering with. */
4206 if (entry_parm)
4208 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4209 passed_type, named_arg);
4211 if (nregs > 0)
4213 current_function_pretend_args_size
4214 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4215 / (PARM_BOUNDARY / BITS_PER_UNIT)
4216 * (PARM_BOUNDARY / BITS_PER_UNIT));
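/* A worked instance of the rounding above, assuming UNITS_PER_WORD == 4
and PARM_BOUNDARY == 64: nregs == 3 gives 12 bytes, rounded up to 16,
the next multiple of the 8-byte parameter boundary. */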
4218 /* Handle calls that pass values in multiple non-contiguous
4219 locations. The Irix 6 ABI has examples of this. */
4220 if (GET_CODE (entry_parm) == PARALLEL)
4221 emit_group_store (validize_mem (stack_parm), entry_parm,
4222 int_size_in_bytes (TREE_TYPE (parm)),
4223 (TYPE_ALIGN (TREE_TYPE (parm))
4224 / BITS_PER_UNIT));
4225 else
4226 move_block_from_reg (REGNO (entry_parm),
4227 validize_mem (stack_parm), nregs,
4228 int_size_in_bytes (TREE_TYPE (parm)));
4230 entry_parm = stack_parm;
4233 #endif
4235 /* If we didn't decide this parm came in a register,
4236 by default it came on the stack. */
4237 if (entry_parm == 0)
4238 entry_parm = stack_parm;
4240 /* Record permanently how this parm was passed. */
4241 DECL_INCOMING_RTL (parm) = entry_parm;
4243 /* If there is actually space on the stack for this parm,
4244 count it in stack_args_size; otherwise set stack_parm to 0
4245 to indicate there is no preallocated stack slot for the parm. */
4247 if (entry_parm == stack_parm
4248 || (GET_CODE (entry_parm) == PARALLEL
4249 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4250 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4251 /* On some machines, even if a parm value arrives in a register
4252 there is still an (uninitialized) stack slot allocated for it.
4254 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4255 whether this parameter already has a stack slot allocated,
4256 because an arg block exists only if current_function_args_size
4257 is larger than some threshold, and we haven't calculated that
4258 yet. So, for now, we just assume that stack slots never exist
4259 in this case. */
4260 || REG_PARM_STACK_SPACE (fndecl) > 0
4261 #endif
4264 stack_args_size.constant += arg_size.constant;
4265 if (arg_size.var)
4266 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4268 else
4269 /* No stack slot was pushed for this parm. */
4270 stack_parm = 0;
4272 /* Update info on where next arg arrives in registers. */
4274 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4275 passed_type, named_arg);
4277 /* If we can't trust the parm stack slot to be aligned enough
4278 for its ultimate type, don't use that slot after entry.
4279 We'll make another stack slot, if we need one. */
4281 int thisparm_boundary
4282 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4284 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4285 stack_parm = 0;
4288 /* If parm was passed in memory, and we need to convert it on entry,
4289 don't store it back in that same slot. */
4290 if (entry_parm != 0
4291 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4292 stack_parm = 0;
4294 #if 0
4295 /* Now adjust STACK_PARM to the mode and precise location
4296 where this parameter should live during execution,
4297 if we discover that it must live in the stack during execution.
4298 To make debuggers happier on big-endian machines, we store
4299 the value in the last bytes of the space available. */
4301 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4302 && stack_parm != 0)
4304 rtx offset_rtx;
4306 if (BYTES_BIG_ENDIAN
4307 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4308 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4309 - GET_MODE_SIZE (nominal_mode));
4311 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4312 if (offset_rtx == const0_rtx)
4313 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4314 else
4315 stack_parm = gen_rtx_MEM (nominal_mode,
4316 gen_rtx_PLUS (Pmode,
4317 internal_arg_pointer,
4318 offset_rtx));
4320 /* If this is a memory ref that contains aggregate components,
4321 mark it as such for cse and loop optimize. */
4322 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4324 #endif /* 0 */
4326 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4327 in the mode in which it arrives.
4328 STACK_PARM is an RTX for a stack slot where the parameter can live
4329 during the function (in case we want to put it there).
4330 STACK_PARM is 0 if no stack slot was pushed for it.
4332 Now output code if necessary to convert ENTRY_PARM to
4333 the type in which this function declares it,
4334 and store that result in an appropriate place,
4335 which may be a pseudo reg, may be STACK_PARM,
4336 or may be a local stack slot if STACK_PARM is 0.
4338 Set DECL_RTL to that place. */
4340 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4342 /* If a BLKmode arrives in registers, copy it to a stack slot.
4343 Handle calls that pass values in multiple non-contiguous
4344 locations. The Irix 6 ABI has examples of this. */
4345 if (GET_CODE (entry_parm) == REG
4346 || GET_CODE (entry_parm) == PARALLEL)
4348 int size_stored
4349 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4350 UNITS_PER_WORD);
4352 /* Note that we will be storing an integral number of words.
4353 So we have to be careful to ensure that we allocate an
4354 integral number of words. We do this below in the
4355 assign_stack_local if space was not allocated in the argument
4356 list. If it was, this will not work if PARM_BOUNDARY is not
4357 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4358 if it becomes a problem. */
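/* E.g., a 10-byte BLKmode parm with UNITS_PER_WORD == 4 gets
SIZE_STORED == 12 from the CEIL_ROUND above: three full words. */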
4360 if (stack_parm == 0)
4362 stack_parm
4363 = assign_stack_local (GET_MODE (entry_parm),
4364 size_stored, 0);
4366 /* If this is a memory ref that contains aggregate
4367 components, mark it as such for cse and loop optimize. */
4368 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4371 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4372 abort ();
4374 if (TREE_READONLY (parm))
4375 RTX_UNCHANGING_P (stack_parm) = 1;
4377 /* Handle calls that pass values in multiple non-contiguous
4378 locations. The Irix 6 ABI has examples of this. */
4379 if (GET_CODE (entry_parm) == PARALLEL)
4380 emit_group_store (validize_mem (stack_parm), entry_parm,
4381 int_size_in_bytes (TREE_TYPE (parm)),
4382 (TYPE_ALIGN (TREE_TYPE (parm))
4383 / BITS_PER_UNIT));
4384 else
4385 move_block_from_reg (REGNO (entry_parm),
4386 validize_mem (stack_parm),
4387 size_stored / UNITS_PER_WORD,
4388 int_size_in_bytes (TREE_TYPE (parm)));
4390 DECL_RTL (parm) = stack_parm;
4392 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4393 && ! DECL_INLINE (fndecl))
4394 /* layout_decl may set this. */
4395 || TREE_ADDRESSABLE (parm)
4396 || TREE_SIDE_EFFECTS (parm)
4397 /* If -ffloat-store specified, don't put explicit
4398 float variables into registers. */
4399 || (flag_float_store
4400 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4401 /* Always assign pseudo to structure return or item passed
4402 by invisible reference. */
4403 || passed_pointer || parm == function_result_decl)
4405 /* Store the parm in a pseudoregister during the function, but we
4406 may need to do it in a wider mode. */
4408 register rtx parmreg;
4409 int regno, regnoi = 0, regnor = 0;
4411 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4413 promoted_nominal_mode
4414 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4416 parmreg = gen_reg_rtx (promoted_nominal_mode);
4417 mark_user_reg (parmreg);
4419 /* If this was an item that we received a pointer to, set DECL_RTL
4420 appropriately. */
4421 if (passed_pointer)
4423 DECL_RTL (parm)
4424 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4425 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4427 else
4428 DECL_RTL (parm) = parmreg;
4430 /* Copy the value into the register. */
4431 if (nominal_mode != passed_mode
4432 || promoted_nominal_mode != promoted_mode)
4434 int save_tree_used;
4435 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4436 mode, by the caller. We now have to convert it to
4437 NOMINAL_MODE, if different. However, PARMREG may be in
4438 a different mode than NOMINAL_MODE if it is being stored
4439 promoted.
4441 If ENTRY_PARM is a hard register, it might be in a register
4442 not valid for operating in its mode (e.g., an odd-numbered
4443 register for a DFmode). In that case, moves are the only
4444 thing valid, so we can't do a convert from there. This
4445 occurs when the calling sequence allows such misaligned
4446 usages.
4448 In addition, the conversion may involve a call, which could
4449 clobber parameters which haven't been copied to pseudo
4450 registers yet. Therefore, we must first copy the parm to
4451 a pseudo reg here, and save the conversion until after all
4452 parameters have been moved. */
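/* For instance, with PROMOTE_FUNCTION_ARGS a `short' parm may arrive
sign-extended in an SImode register (PROMOTED_MODE) and must be
converted back to HImode (NOMINAL_MODE) by the code below. */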
4454 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4456 emit_move_insn (tempreg, validize_mem (entry_parm));
4458 push_to_sequence (conversion_insns);
4459 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4461 /* TREE_USED gets set erroneously during expand_assignment. */
4462 save_tree_used = TREE_USED (parm);
4463 expand_assignment (parm,
4464 make_tree (nominal_type, tempreg), 0, 0);
4465 TREE_USED (parm) = save_tree_used;
4466 conversion_insns = get_insns ();
4467 did_conversion = 1;
4468 end_sequence ();
4470 else
4471 emit_move_insn (parmreg, validize_mem (entry_parm));
4473 /* If we were passed a pointer but the actual value
4474 can safely live in a register, put it in one. */
4475 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4476 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4477 && ! DECL_INLINE (fndecl))
4478 /* layout_decl may set this. */
4479 || TREE_ADDRESSABLE (parm)
4480 || TREE_SIDE_EFFECTS (parm)
4481 /* If -ffloat-store specified, don't put explicit
4482 float variables into registers. */
4483 || (flag_float_store
4484 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4486 /* We can't use nominal_mode, because it will have been set to
4487 Pmode above. We must use the actual mode of the parm. */
4488 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4489 mark_user_reg (parmreg);
4490 emit_move_insn (parmreg, DECL_RTL (parm));
4491 DECL_RTL (parm) = parmreg;
4492 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4493 now the parm. */
4494 stack_parm = 0;
4496 #ifdef FUNCTION_ARG_CALLEE_COPIES
4497 /* If we are passed an arg by reference and it is our responsibility
4498 to make a copy, do it now.
4499 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4500 original argument, so we must recreate them in the call to
4501 FUNCTION_ARG_CALLEE_COPIES. */
4502 /* ??? Later add code to handle the case where the argument isn't
4503 modified, so that the copy can be skipped. */
4505 else if (passed_pointer
4506 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4507 TYPE_MODE (DECL_ARG_TYPE (parm)),
4508 DECL_ARG_TYPE (parm),
4509 named_arg)
4510 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4512 rtx copy;
4513 tree type = DECL_ARG_TYPE (parm);
4515 /* This sequence may involve a library call perhaps clobbering
4516 registers that haven't been copied to pseudos yet. */
4518 push_to_sequence (conversion_insns);
4520 if (TYPE_SIZE (type) == 0
4521 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4522 /* This is a variable sized object. */
4523 copy = gen_rtx_MEM (BLKmode,
4524 allocate_dynamic_stack_space
4525 (expr_size (parm), NULL_RTX,
4526 TYPE_ALIGN (type)));
4527 else
4528 copy = assign_stack_temp (TYPE_MODE (type),
4529 int_size_in_bytes (type), 1);
4530 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4531 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4533 store_expr (parm, copy, 0);
4534 emit_move_insn (parmreg, XEXP (copy, 0));
4535 if (current_function_check_memory_usage)
4536 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4537 XEXP (copy, 0), Pmode,
4538 GEN_INT (int_size_in_bytes (type)),
4539 TYPE_MODE (sizetype),
4540 GEN_INT (MEMORY_USE_RW),
4541 TYPE_MODE (integer_type_node));
4542 conversion_insns = get_insns ();
4543 did_conversion = 1;
4544 end_sequence ();
4546 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4548 /* In any case, record the parm's desired stack location
4549 in case we later discover it must live in the stack.
4551 If it is a COMPLEX value, store the stack location for both
4552 halves. */
4554 if (GET_CODE (parmreg) == CONCAT)
4555 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4556 else
4557 regno = REGNO (parmreg);
4559 if (regno >= max_parm_reg)
4561 rtx *new;
4562 int old_max_parm_reg = max_parm_reg;
4564 /* It's slow to expand this one register at a time,
4565 but it's also rare and we need max_parm_reg to be
4566 precisely correct. */
4567 max_parm_reg = regno + 1;
4568 new = (rtx *) xrealloc (parm_reg_stack_loc,
4569 max_parm_reg * sizeof (rtx));
4570 bzero ((char *) (new + old_max_parm_reg),
4571 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4572 parm_reg_stack_loc = new;
4575 if (GET_CODE (parmreg) == CONCAT)
4577 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4579 regnor = REGNO (gen_realpart (submode, parmreg));
4580 regnoi = REGNO (gen_imagpart (submode, parmreg));
4582 if (stack_parm != 0)
4584 parm_reg_stack_loc[regnor]
4585 = gen_realpart (submode, stack_parm);
4586 parm_reg_stack_loc[regnoi]
4587 = gen_imagpart (submode, stack_parm);
4589 else
4591 parm_reg_stack_loc[regnor] = 0;
4592 parm_reg_stack_loc[regnoi] = 0;
4595 else
4596 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4598 /* Mark the register as eliminable if we did no conversion
4599 and it was copied from memory at a fixed offset,
4600 and the arg pointer was not copied to a pseudo-reg.
4601 If the arg pointer is a pseudo reg or the offset formed
4602 an invalid address, such memory-equivalences
4603 as we make here would screw up life analysis for it. */
4604 if (nominal_mode == passed_mode
4605 && ! did_conversion
4606 && stack_parm != 0
4607 && GET_CODE (stack_parm) == MEM
4608 && stack_offset.var == 0
4609 && reg_mentioned_p (virtual_incoming_args_rtx,
4610 XEXP (stack_parm, 0)))
4612 rtx linsn = get_last_insn ();
4613 rtx sinsn, set;
4615 /* Mark complex types separately. */
4616 if (GET_CODE (parmreg) == CONCAT)
4617 /* Scan backwards for the set of the real and
4618 imaginary parts. */
4619 for (sinsn = linsn; sinsn != 0;
4620 sinsn = prev_nonnote_insn (sinsn))
4622 set = single_set (sinsn);
4623 if (set != 0
4624 && SET_DEST (set) == regno_reg_rtx [regnoi])
4625 REG_NOTES (sinsn)
4626 = gen_rtx_EXPR_LIST (REG_EQUIV,
4627 parm_reg_stack_loc[regnoi],
4628 REG_NOTES (sinsn));
4629 else if (set != 0
4630 && SET_DEST (set) == regno_reg_rtx [regnor])
4631 REG_NOTES (sinsn)
4632 = gen_rtx_EXPR_LIST (REG_EQUIV,
4633 parm_reg_stack_loc[regnor],
4634 REG_NOTES (sinsn));
4636 else if ((set = single_set (linsn)) != 0
4637 && SET_DEST (set) == parmreg)
4638 REG_NOTES (linsn)
4639 = gen_rtx_EXPR_LIST (REG_EQUIV,
4640 stack_parm, REG_NOTES (linsn));
4643 /* For pointer data type, suggest pointer register. */
4644 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4645 mark_reg_pointer (parmreg,
4646 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4647 / BITS_PER_UNIT));
4649 else
4651 /* Value must be stored in the stack slot STACK_PARM
4652 during function execution. */
4654 if (promoted_mode != nominal_mode)
4656 /* Conversion is required. */
4657 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4659 emit_move_insn (tempreg, validize_mem (entry_parm));
4661 push_to_sequence (conversion_insns);
4662 entry_parm = convert_to_mode (nominal_mode, tempreg,
4663 TREE_UNSIGNED (TREE_TYPE (parm)));
4664 if (stack_parm)
4666 /* ??? This may need a big-endian conversion on sparc64. */
4667 stack_parm = change_address (stack_parm, nominal_mode,
4668 NULL_RTX);
4670 conversion_insns = get_insns ();
4671 did_conversion = 1;
4672 end_sequence ();
4675 if (entry_parm != stack_parm)
4677 if (stack_parm == 0)
4679 stack_parm
4680 = assign_stack_local (GET_MODE (entry_parm),
4681 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4682 /* If this is a memory ref that contains aggregate components,
4683 mark it as such for cse and loop optimize. */
4684 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4687 if (promoted_mode != nominal_mode)
4689 push_to_sequence (conversion_insns);
4690 emit_move_insn (validize_mem (stack_parm),
4691 validize_mem (entry_parm));
4692 conversion_insns = get_insns ();
4693 end_sequence ();
4695 else
4696 emit_move_insn (validize_mem (stack_parm),
4697 validize_mem (entry_parm));
4699 if (current_function_check_memory_usage)
4701 push_to_sequence (conversion_insns);
4702 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4703 XEXP (stack_parm, 0), Pmode,
4704 GEN_INT (GET_MODE_SIZE (GET_MODE
4705 (entry_parm))),
4706 TYPE_MODE (sizetype),
4707 GEN_INT (MEMORY_USE_RW),
4708 TYPE_MODE (integer_type_node));
4710 conversion_insns = get_insns ();
4711 end_sequence ();
4713 DECL_RTL (parm) = stack_parm;
4716 /* If this "parameter" was the place where we are receiving the
4717 function's incoming structure pointer, set up the result. */
4718 if (parm == function_result_decl)
4720 tree result = DECL_RESULT (fndecl);
4721 tree restype = TREE_TYPE (result);
4723 DECL_RTL (result)
4724 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4726 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4727 AGGREGATE_TYPE_P (restype));
4730 if (TREE_THIS_VOLATILE (parm))
4731 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4732 if (TREE_READONLY (parm))
4733 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4736 /* Output all parameter conversion instructions (possibly including calls)
4737 now that all parameters have been copied out of hard registers. */
4738 emit_insns (conversion_insns);
4740 last_parm_insn = get_last_insn ();
4742 current_function_args_size = stack_args_size.constant;
4744 /* Adjust function incoming argument size for alignment and
4745 minimum length. */
4747 #ifdef REG_PARM_STACK_SPACE
4748 #ifndef MAYBE_REG_PARM_STACK_SPACE
4749 current_function_args_size = MAX (current_function_args_size,
4750 REG_PARM_STACK_SPACE (fndecl));
4751 #endif
4752 #endif
4754 #ifdef STACK_BOUNDARY
4755 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4757 current_function_args_size
4758 = ((current_function_args_size + STACK_BYTES - 1)
4759 / STACK_BYTES) * STACK_BYTES;
4760 #endif
4762 #ifdef ARGS_GROW_DOWNWARD
4763 current_function_arg_offset_rtx
4764 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4765 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4766 size_int (-stack_args_size.constant)),
4767 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4768 #else
4769 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4770 #endif
4772 /* See how many bytes, if any, of its args a function should try to pop
4773 on return. */
4775 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4776 current_function_args_size);
4778 /* For a stdarg.h function, save info about
4779 regs and stack space used by the named args. */
4781 if (!hide_last_arg)
4782 current_function_args_info = args_so_far;
4784 /* Set the rtx used for the function return value. Put this in its
4785 own variable so any optimizers that need this information don't have
4786 to include tree.h. Do this here so it gets done when an inlined
4787 function gets output. */
4789 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4792 /* Indicate whether REGNO is an incoming argument to the current function
4793 that was promoted to a wider mode. If so, return the RTX for the
4794 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4795 that REGNO is promoted from and whether the promotion was signed or
4796 unsigned. */
4798 #ifdef PROMOTE_FUNCTION_ARGS
4800 rtx
4801 promoted_input_arg (regno, pmode, punsignedp)
4802 int regno;
4803 enum machine_mode *pmode;
4804 int *punsignedp;
4806 tree arg;
4808 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4809 arg = TREE_CHAIN (arg))
4810 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4811 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4812 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4814 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4815 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4817 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4818 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4819 && mode != DECL_MODE (arg))
4821 *pmode = DECL_MODE (arg);
4822 *punsignedp = unsignedp;
4823 return DECL_INCOMING_RTL (arg);
4827 return 0;
4830 #endif
4832 /* Compute the size and offset from the start of the stacked arguments for a
4833 parm passed in mode PASSED_MODE and with type TYPE.
4835 INITIAL_OFFSET_PTR points to the current offset into the stacked
4836 arguments.
4838 The starting offset and size for this parm are returned in *OFFSET_PTR
4839 and *ARG_SIZE_PTR, respectively.
4841 IN_REGS is non-zero if the argument will be passed in registers. It will
4842 never be set if REG_PARM_STACK_SPACE is not defined.
4844 FNDECL is the function in which the argument was defined.
4846 There are two types of rounding that are done. The first, controlled by
4847 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4848 list to be aligned to the specific boundary (in bits). This rounding
4849 affects the initial and starting offsets, but not the argument size.
4851 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4852 optionally rounds the size of the parm to PARM_BOUNDARY. The
4853 initial offset is not affected by this rounding, while the size always
4854 is and the starting offset may be. */
4856 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4857 initial_offset_ptr is positive because locate_and_pad_parm's
4858 callers pass in the total size of args so far as
4859 initial_offset_ptr. arg_size_ptr is always positive. */
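/* A sketch with hypothetical values: if FUNCTION_ARG_BOUNDARY returns 64
for some parm, an initial offset of 4 bytes is first rounded up to 8;
and with PARM_BOUNDARY == 32, a 6-byte parm has its size rounded up to
8 while its starting offset is left alone (when arguments grow upward
and the padding direction is upward). */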
4861 void
4862 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4863 initial_offset_ptr, offset_ptr, arg_size_ptr)
4864 enum machine_mode passed_mode;
4865 tree type;
4866 int in_regs;
4867 tree fndecl ATTRIBUTE_UNUSED;
4868 struct args_size *initial_offset_ptr;
4869 struct args_size *offset_ptr;
4870 struct args_size *arg_size_ptr;
4872 tree sizetree
4873 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4874 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4875 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4877 #ifdef REG_PARM_STACK_SPACE
4878 /* If we have found a stack parm before we reach the end of the
4879 area reserved for registers, skip that area. */
4880 if (! in_regs)
4882 int reg_parm_stack_space = 0;
4884 #ifdef MAYBE_REG_PARM_STACK_SPACE
4885 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4886 #else
4887 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4888 #endif
4889 if (reg_parm_stack_space > 0)
4891 if (initial_offset_ptr->var)
4893 initial_offset_ptr->var
4894 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4895 size_int (reg_parm_stack_space));
4896 initial_offset_ptr->constant = 0;
4898 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4899 initial_offset_ptr->constant = reg_parm_stack_space;
4902 #endif /* REG_PARM_STACK_SPACE */
4904 arg_size_ptr->var = 0;
4905 arg_size_ptr->constant = 0;
4907 #ifdef ARGS_GROW_DOWNWARD
4908 if (initial_offset_ptr->var)
4910 offset_ptr->constant = 0;
4911 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4912 initial_offset_ptr->var);
4914 else
4916 offset_ptr->constant = - initial_offset_ptr->constant;
4917 offset_ptr->var = 0;
4919 if (where_pad != none
4920 && (TREE_CODE (sizetree) != INTEGER_CST
4921 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4922 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4923 SUB_PARM_SIZE (*offset_ptr, sizetree);
4924 if (where_pad != downward)
4925 pad_to_arg_alignment (offset_ptr, boundary);
4926 if (initial_offset_ptr->var)
4928 arg_size_ptr->var = size_binop (MINUS_EXPR,
4929 size_binop (MINUS_EXPR,
4930 integer_zero_node,
4931 initial_offset_ptr->var),
4932 offset_ptr->var);
4934 else
4936 arg_size_ptr->constant = (- initial_offset_ptr->constant
4937 - offset_ptr->constant);
4939 #else /* !ARGS_GROW_DOWNWARD */
4940 pad_to_arg_alignment (initial_offset_ptr, boundary);
4941 *offset_ptr = *initial_offset_ptr;
4943 #ifdef PUSH_ROUNDING
4944 if (passed_mode != BLKmode)
4945 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4946 #endif
4948 /* Pad_below needs the pre-rounded size to know how much to pad below,
4949 so this must be done before rounding up. */
4950 if (where_pad == downward
4951 /* However, BLKmode args passed in regs have their padding done elsewhere.
4952 The stack slot must be able to hold the entire register. */
4953 && !(in_regs && passed_mode == BLKmode))
4954 pad_below (offset_ptr, passed_mode, sizetree);
4956 if (where_pad != none
4957 && (TREE_CODE (sizetree) != INTEGER_CST
4958 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4959 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4961 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4962 #endif /* ARGS_GROW_DOWNWARD */
4965 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4966 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
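/* E.g., a constant offset of 12 with BOUNDARY == 64 becomes 16 via
CEIL_ROUND, or 8 via FLOOR_ROUND when ARGS_GROW_DOWNWARD. */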
4968 static void
4969 pad_to_arg_alignment (offset_ptr, boundary)
4970 struct args_size *offset_ptr;
4971 int boundary;
4973 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4975 if (boundary > BITS_PER_UNIT)
4977 if (offset_ptr->var)
4979 offset_ptr->var =
4980 #ifdef ARGS_GROW_DOWNWARD
4981 round_down
4982 #else
4983 round_up
4984 #endif
4985 (ARGS_SIZE_TREE (*offset_ptr),
4986 boundary / BITS_PER_UNIT);
4987 offset_ptr->constant = 0; /*?*/
4989 else
4990 offset_ptr->constant =
4991 #ifdef ARGS_GROW_DOWNWARD
4992 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4993 #else
4994 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4995 #endif
4999 #ifndef ARGS_GROW_DOWNWARD
5000 static void
5001 pad_below (offset_ptr, passed_mode, sizetree)
5002 struct args_size *offset_ptr;
5003 enum machine_mode passed_mode;
5004 tree sizetree;
5006 if (passed_mode != BLKmode)
5008 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5009 offset_ptr->constant
5010 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5011 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5012 - GET_MODE_SIZE (passed_mode));
5014 else
5016 if (TREE_CODE (sizetree) != INTEGER_CST
5017 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5019 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5020 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5021 /* Add it in. */
5022 ADD_PARM_SIZE (*offset_ptr, s2);
5023 SUB_PARM_SIZE (*offset_ptr, sizetree);
5027 #endif
5029 #ifdef ARGS_GROW_DOWNWARD
5030 static tree
5031 round_down (value, divisor)
5032 tree value;
5033 int divisor;
5035 return size_binop (MULT_EXPR,
5036 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5037 size_int (divisor));
5039 #endif
5041 /* Walk the tree of blocks describing the binding levels within a function
5042 and warn about uninitialized variables.
5043 This is done after calling flow_analysis and before global_alloc
5044 clobbers the pseudo-regs to hard regs. */
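/* A minimal example that should draw the first warning below:
int f () { int i; return i; }
assuming flow analysis finds the pseudo holding `i' uninitialized. */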
5046 void
5047 uninitialized_vars_warning (block)
5048 tree block;
5050 register tree decl, sub;
5051 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5053 if (TREE_CODE (decl) == VAR_DECL
5054 /* These warnings are unreliable for aggregates
5055 because assigning the fields one by one can fail to convince
5056 flow.c that the entire aggregate was initialized.
5057 Unions are troublesome because members may be shorter. */
5058 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5059 && DECL_RTL (decl) != 0
5060 && GET_CODE (DECL_RTL (decl)) == REG
5061 /* Global optimizations can make it difficult to determine if a
5062 particular variable has been initialized. However, a VAR_DECL
5063 with a nonzero DECL_INITIAL had an initializer, so do not
5064 claim it is potentially uninitialized.
5066 We do not care about the actual value in DECL_INITIAL, so we do
5067 not worry that it may be a dangling pointer. */
5068 && DECL_INITIAL (decl) == NULL_TREE
5069 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5070 warning_with_decl (decl,
5071 "`%s' might be used uninitialized in this function");
5072 if (TREE_CODE (decl) == VAR_DECL
5073 && DECL_RTL (decl) != 0
5074 && GET_CODE (DECL_RTL (decl)) == REG
5075 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5076 warning_with_decl (decl,
5077 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5079 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5080 uninitialized_vars_warning (sub);
5083 /* Do the appropriate part of uninitialized_vars_warning
5084 but for arguments instead of local variables. */
5086 void
5087 setjmp_args_warning ()
5089 register tree decl;
5090 for (decl = DECL_ARGUMENTS (current_function_decl);
5091 decl; decl = TREE_CHAIN (decl))
5092 if (DECL_RTL (decl) != 0
5093 && GET_CODE (DECL_RTL (decl)) == REG
5094 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5095 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5098 /* If this function calls setjmp, put all vars into the stack
5099 unless they were declared `register'. */
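/* E.g., a local modified between the setjmp call and a later longjmp
must live in memory: a copy kept only in a register could be rolled
back to a stale value when longjmp restores the registers. */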
5101 void
5102 setjmp_protect (block)
5103 tree block;
5105 register tree decl, sub;
5106 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5107 if ((TREE_CODE (decl) == VAR_DECL
5108 || TREE_CODE (decl) == PARM_DECL)
5109 && DECL_RTL (decl) != 0
5110 && (GET_CODE (DECL_RTL (decl)) == REG
5111 || (GET_CODE (DECL_RTL (decl)) == MEM
5112 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5113 /* If this variable came from an inline function, it must be
5114 that its life doesn't overlap the setjmp. If there was a
5115 setjmp in the function, it would already be in memory. We
5116 must exclude such variables because their DECL_RTL might be
5117 set to strange things such as virtual_stack_vars_rtx. */
5118 && ! DECL_FROM_INLINE (decl)
5119 && (
5120 #ifdef NON_SAVING_SETJMP
5121 /* If longjmp doesn't restore the registers,
5122 don't put anything in them. */
5123 NON_SAVING_SETJMP
5124 ||
5125 #endif
5126 ! DECL_REGISTER (decl)))
5127 put_var_into_stack (decl);
5128 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5129 setjmp_protect (sub);
5132 /* Like the previous function, but for args instead of local variables. */
5134 void
5135 setjmp_protect_args ()
5137 register tree decl;
5138 for (decl = DECL_ARGUMENTS (current_function_decl);
5139 decl; decl = TREE_CHAIN (decl))
5140 if ((TREE_CODE (decl) == VAR_DECL
5141 || TREE_CODE (decl) == PARM_DECL)
5142 && DECL_RTL (decl) != 0
5143 && (GET_CODE (DECL_RTL (decl)) == REG
5144 || (GET_CODE (DECL_RTL (decl)) == MEM
5145 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5146 && (
5147 /* If longjmp doesn't restore the registers,
5148 don't put anything in them. */
5149 #ifdef NON_SAVING_SETJMP
5150 NON_SAVING_SETJMP
5151 ||
5152 #endif
5153 ! DECL_REGISTER (decl)))
5154 put_var_into_stack (decl);
5157 /* Return the context-pointer register corresponding to DECL,
5158 or 0 if it does not need one. */
5160 rtx
5161 lookup_static_chain (decl)
5162 tree decl;
5164 tree context = decl_function_context (decl);
5165 tree link;
5167 if (context == 0
5168 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5169 return 0;
5171 /* We treat inline_function_decl as an alias for the current function
5172 because that is the inline function whose vars, types, etc.
5173 are being merged into the current function.
5174 See expand_inline_function. */
5175 if (context == current_function_decl || context == inline_function_decl)
5176 return virtual_stack_vars_rtx;
5178 for (link = context_display; link; link = TREE_CHAIN (link))
5179 if (TREE_PURPOSE (link) == context)
5180 return RTL_EXPR_RTL (TREE_VALUE (link));
5182 abort ();
5185 /* Convert a stack slot address ADDR for variable VAR
5186 (from a containing function)
5187 into an address valid in this function (using a static chain). */
5189 rtx
5190 fix_lexical_addr (addr, var)
5191 rtx addr;
5192 tree var;
5194 rtx basereg;
5195 HOST_WIDE_INT displacement;
5196 tree context = decl_function_context (var);
5197 struct function *fp;
5198 rtx base = 0;
5200 /* If this is the present function, we need not do anything. */
5201 if (context == current_function_decl || context == inline_function_decl)
5202 return addr;
5204 for (fp = outer_function_chain; fp; fp = fp->next)
5205 if (fp->decl == context)
5206 break;
5208 if (fp == 0)
5209 abort ();
5211 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5212 addr = XEXP (XEXP (addr, 0), 0);
5214 /* Decode given address as base reg plus displacement. */
5215 if (GET_CODE (addr) == REG)
5216 basereg = addr, displacement = 0;
5217 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5218 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5219 else
5220 abort ();
5222 /* We accept vars reached via the containing function's
5223 incoming arg pointer and via its stack variables pointer. */
5224 if (basereg == fp->internal_arg_pointer)
5226 /* If reached via arg pointer, get the arg pointer value
5227 out of that function's stack frame.
5229 There are two cases: If a separate ap is needed, allocate a
5230 slot in the outer function for it and dereference it that way.
5231 This is correct even if the real ap is actually a pseudo.
5232 Otherwise, just adjust the offset from the frame pointer to
5233 compensate. */
5235 #ifdef NEED_SEPARATE_AP
5236 rtx addr;
5238 if (fp->x_arg_pointer_save_area == 0)
5239 fp->x_arg_pointer_save_area
5240 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5242 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5243 addr = memory_address (Pmode, addr);
5245 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5246 #else
5247 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5248 base = lookup_static_chain (var);
5249 #endif
5252 else if (basereg == virtual_stack_vars_rtx)
5254 /* This is the same code as lookup_static_chain, duplicated here to
5255 avoid an extra call to decl_function_context. */
5256 tree link;
5258 for (link = context_display; link; link = TREE_CHAIN (link))
5259 if (TREE_PURPOSE (link) == context)
5261 base = RTL_EXPR_RTL (TREE_VALUE (link));
5262 break;
5266 if (base == 0)
5267 abort ();
5269 /* Use same offset, relative to appropriate static chain or argument
5270 pointer. */
5271 return plus_constant (base, displacement);
5274 /* Return the address of the trampoline for entering nested fn FUNCTION.
5275 If necessary, allocate a trampoline (in the stack frame)
5276 and emit rtl to initialize its contents (at entry to this function). */
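/* A trampoline here is a small block of code, built at run time in the
containing function's stack frame, that loads the static chain register
and jumps to FUNCTION; taking the address of a nested function yields
the (suitably rounded) address of that block. */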
5278 rtx
5279 trampoline_address (function)
5280 tree function;
5282 tree link;
5283 tree rtlexp;
5284 rtx tramp;
5285 struct function *fp;
5286 tree fn_context;
5288 /* Find an existing trampoline and return it. */
5289 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5290 if (TREE_PURPOSE (link) == function)
5291 return
5292 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5294 for (fp = outer_function_chain; fp; fp = fp->next)
5295 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5296 if (TREE_PURPOSE (link) == function)
5298 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5299 function);
5300 return round_trampoline_addr (tramp);
5303 /* None exists; we must make one. */
5305 /* Find the `struct function' for the function containing FUNCTION. */
5306 fp = 0;
5307 fn_context = decl_function_context (function);
5308 if (fn_context != current_function_decl
5309 && fn_context != inline_function_decl)
5310 for (fp = outer_function_chain; fp; fp = fp->next)
5311 if (fp->decl == fn_context)
5312 break;
5314 /* Allocate run-time space for this trampoline
5315 (usually in the defining function's stack frame). */
5316 #ifdef ALLOCATE_TRAMPOLINE
5317 tramp = ALLOCATE_TRAMPOLINE (fp);
5318 #else
5319 /* If rounding needed, allocate extra space
5320 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5321 #ifdef TRAMPOLINE_ALIGNMENT
5322 #define TRAMPOLINE_REAL_SIZE \
5323 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5324 #else
5325 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5326 #endif
5327 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5328 fp ? fp : current_function);
5329 #endif
5331 /* Record the trampoline for reuse and note it for later initialization
5332 by expand_function_end. */
5333 if (fp != 0)
5335 push_obstacks (fp->function_maybepermanent_obstack,
5336 fp->function_maybepermanent_obstack);
5337 rtlexp = make_node (RTL_EXPR);
5338 RTL_EXPR_RTL (rtlexp) = tramp;
5339 fp->x_trampoline_list = tree_cons (function, rtlexp,
5340 fp->x_trampoline_list);
5341 pop_obstacks ();
5343 else
5345 /* Make the RTL_EXPR node temporary, not momentary, so that the
5346 trampoline_list doesn't become garbage. */
5347 int momentary = suspend_momentary ();
5348 rtlexp = make_node (RTL_EXPR);
5349 resume_momentary (momentary);
5351 RTL_EXPR_RTL (rtlexp) = tramp;
5352 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5355 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5356 return round_trampoline_addr (tramp);
5359 /* Given a trampoline address,
5360 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
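/* E.g., with TRAMPOLINE_ALIGNMENT == 64 the code below computes
(TRAMP + 7) & -8, rounding the address up to an 8-byte boundary. */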
5362 static rtx
5363 round_trampoline_addr (tramp)
5364 rtx tramp;
5366 #ifdef TRAMPOLINE_ALIGNMENT
5367 /* Round address up to desired boundary. */
5368 rtx temp = gen_reg_rtx (Pmode);
5369 temp = expand_binop (Pmode, add_optab, tramp,
5370 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5371 temp, 0, OPTAB_LIB_WIDEN);
5372 tramp = expand_binop (Pmode, and_optab, temp,
5373 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5374 temp, 0, OPTAB_LIB_WIDEN);
5375 #endif
5376 return tramp;
5379 /* Insert the BLOCK in the block-tree before LAST_INSN. */
5381 void
5382 retrofit_block (block, last_insn)
5383 tree block;
5384 rtx last_insn;
5386 rtx insn;
5388 /* Now insert the new BLOCK at the right place in the block trees
5389 for the function which called the inline function. We just look
5390 backwards for a NOTE_INSN_BLOCK_{BEG,END}. If we find the
5391 beginning of a block, then this new block becomes the first
5392 subblock of that block. If we find the end of a block, then this
5393 new block follows that block in the list of blocks. */
5394 for (insn = last_insn; insn; insn = PREV_INSN (insn))
5395 if (GET_CODE (insn) == NOTE
5396 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
5397 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
5398 break;
5399 if (!insn || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5401 tree superblock;
5403 if (insn)
5404 superblock = NOTE_BLOCK (insn);
5405 else
5406 superblock = DECL_INITIAL (current_function_decl);
5408 BLOCK_SUPERCONTEXT (block) = superblock;
5409 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (superblock);
5410 BLOCK_SUBBLOCKS (superblock) = block;
5412 else
5414 tree prevblock = NOTE_BLOCK (insn);
5416 BLOCK_SUPERCONTEXT (block) = BLOCK_SUPERCONTEXT (prevblock);
5417 BLOCK_CHAIN (block) = BLOCK_CHAIN (prevblock);
5418 BLOCK_CHAIN (prevblock) = block;
5422 /* The functions identify_blocks and reorder_blocks provide a way to
5423 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5424 duplicate portions of the RTL code. Call identify_blocks before
5425 changing the RTL, and call reorder_blocks after. */
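/* A sketch of the intended calling sequence:
identify_blocks (DECL_INITIAL (fndecl), get_insns ());
... reorder or duplicate RTL ...
reorder_blocks (DECL_INITIAL (fndecl), get_insns ());
so that the BLOCK tree is rebuilt to match the new note order. */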
5427 /* Put all this function's BLOCK nodes including those that are chained
5428 onto the first block into a vector, and return it.
5429 Also store in each NOTE for the beginning or end of a block
5430 the index of that block in the vector.
5431 The arguments are BLOCK, the chain of top-level blocks of the function,
5432 and INSNS, the insn chain of the function. */
5434 void
5435 identify_blocks (block, insns)
5436 tree block;
5437 rtx insns;
5439 int n_blocks;
5440 tree *block_vector;
5441 tree *block_stack;
5442 int depth = 0;
5443 int current_block_number = 1;
5444 rtx insn;
5446 if (block == 0)
5447 return;
5449 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5450 depth-first order. */
5451 n_blocks = all_blocks (block, 0);
5452 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5453 all_blocks (block, block_vector);
5455 block_stack = (tree *) alloca (n_blocks * sizeof (tree));
5457 for (insn = insns; insn; insn = NEXT_INSN (insn))
5458 if (GET_CODE (insn) == NOTE)
5460 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5462 tree b;
5464 /* If there are more block notes than BLOCKs, something
5465 is badly wrong. */
5466 if (current_block_number == n_blocks)
5467 abort ();
5469 b = block_vector[current_block_number++];
5470 NOTE_BLOCK (insn) = b;
5471 block_stack[depth++] = b;
5473 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5475 if (depth == 0)
5476 /* There are more NOTE_INSN_BLOCK_ENDs than
5477 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5478 abort ();
5480 NOTE_BLOCK (insn) = block_stack[--depth];
5484 /* In whole-function mode, we might not have seen the whole function
5485 yet, so we might not use up all the blocks. */
5486 if (n_blocks != current_block_number
5487 && !current_function->x_whole_function_mode_p)
5488 abort ();
5490 free (block_vector);
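/* Illustrative example: a function body shaped like

       { a ... { b ... } { c ... } }

   produces the note stream

       BEG(1) BEG(2) END(2) BEG(3) END(3) END(1)

   and the loop above hands out blocks from block_vector in exactly
   that BEG order, while block_stack pairs every END with the most
   recent unmatched BEG.  */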
5493 /* Given a revised instruction chain, rebuild the tree structure of
5494 BLOCK nodes to correspond to the new order of RTL. The new block
5495 tree is inserted below BLOCK. Returns the current top-level
5496 block. */
5498 tree
5499 reorder_blocks (block, insns)
5500 tree block;
5501 rtx insns;
5503 tree current_block = block;
5504 rtx insn;
5506 if (block == NULL_TREE)
5507 return NULL_TREE;
5509 /* Prune the old trees away, so that they don't get in the way. */
5510 BLOCK_SUBBLOCKS (current_block) = 0;
5511 BLOCK_CHAIN (current_block) = 0;
5513 for (insn = insns; insn; insn = NEXT_INSN (insn))
5514 if (GET_CODE (insn) == NOTE)
5516 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5518 tree block = NOTE_BLOCK (insn);
5519 /* If we have seen this block before, copy it. */
5520 if (TREE_ASM_WRITTEN (block))
5521 block = copy_node (block);
5522 BLOCK_SUBBLOCKS (block) = 0;
5523 TREE_ASM_WRITTEN (block) = 1;
5524 BLOCK_SUPERCONTEXT (block) = current_block;
5525 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5526 BLOCK_SUBBLOCKS (current_block) = block;
5527 current_block = block;
5528 NOTE_SOURCE_FILE (insn) = 0;
5530 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5532 BLOCK_SUBBLOCKS (current_block)
5533 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5534 current_block = BLOCK_SUPERCONTEXT (current_block);
5535 NOTE_SOURCE_FILE (insn) = 0;
5539 BLOCK_SUBBLOCKS (current_block)
5540 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5541 return current_block;
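/* A minimal sketch of why the copy_node above is needed: if an
   optimizer duplicated a region of RTL, the note stream can read

       BEG(2) END(2) ... BEG(2) END(2)

   TREE_ASM_WRITTEN is set when block 2 is first placed, so the second
   occurrence gets a fresh copy -- a BLOCK node may appear only once
   in the block tree.  */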
5544 /* Reverse the order of elements in the chain T of blocks,
5545 and return the new head of the chain (old last element). */
5547 static tree
5548 blocks_nreverse (t)
5549 tree t;
5551 register tree prev = 0, decl, next;
5552 for (decl = t; decl; decl = next)
5554 next = BLOCK_CHAIN (decl);
5555 BLOCK_CHAIN (decl) = prev;
5556 prev = decl;
5558 return prev;
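/* For example: applied to the chain A -> B -> C, blocks_nreverse
   rewrites the BLOCK_CHAIN links in place and returns C -> B -> A,
   analogous to nreverse for ordinary tree lists.  */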
5561 /* Count the subblocks of the list starting with BLOCK, and list them
5562 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5563 blocks. */
5565 static int
5566 all_blocks (block, vector)
5567 tree block;
5568 tree *vector;
5570 int n_blocks = 0;
5572 while (block)
5574 TREE_ASM_WRITTEN (block) = 0;
5576 /* Record this block. */
5577 if (vector)
5578 vector[n_blocks] = block;
5580 ++n_blocks;
5582 /* Record the subblocks, and their subblocks... */
5583 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5584 vector ? vector + n_blocks : 0);
5585 block = BLOCK_CHAIN (block);
5588 return n_blocks;
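/* Illustration: if block 1 has subblocks 2 and 4, and 2 has subblock
   3, the vector is filled as { 1, 2, 3, 4 } -- each block before its
   subblocks, and subblocks before the next sibling.  This is the same
   depth-first order identify_blocks relies on.  */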
5591 /* Allocate a function structure and reset its contents to the defaults. */
5592 static void
5593 prepare_function_start ()
5595 current_function = (struct function *) xcalloc (1, sizeof (struct function));
5597 init_stmt_for_function ();
5598 init_eh_for_function ();
5600 cse_not_expected = ! optimize;
5602 /* Caller save not needed yet. */
5603 caller_save_needed = 0;
5605 /* No stack slots have been made yet. */
5606 stack_slot_list = 0;
5608 current_function_has_nonlocal_label = 0;
5609 current_function_has_nonlocal_goto = 0;
5611 /* There is no stack slot for handling nonlocal gotos. */
5612 nonlocal_goto_handler_slots = 0;
5613 nonlocal_goto_stack_level = 0;
5615 /* No labels have been declared for nonlocal use. */
5616 nonlocal_labels = 0;
5617 nonlocal_goto_handler_labels = 0;
5619 /* No function calls so far in this function. */
5620 function_call_count = 0;
5622 /* No parm regs have been allocated.
5623 (This is important for output_inline_function.) */
5624 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5626 /* Initialize the RTL mechanism. */
5627 init_emit ();
5629 /* Initialize the queue of pending postincrement and postdecrements,
5630 and some other info in expr.c. */
5631 init_expr ();
5633 /* We haven't done register allocation yet. */
5634 reg_renumber = 0;
5636 init_varasm_status (current_function);
5638 /* Clear out data used for inlining. */
5639 current_function->inlinable = 0;
5640 current_function->original_decl_initial = 0;
5641 current_function->original_arg_vector = 0;
5643 /* Set if a call to setjmp is seen. */
5644 current_function_calls_setjmp = 0;
5646 /* Set if a call to longjmp is seen. */
5647 current_function_calls_longjmp = 0;
5649 current_function_calls_alloca = 0;
5650 current_function_contains_functions = 0;
5651 current_function_is_leaf = 0;
5652 current_function_sp_is_unchanging = 0;
5653 current_function_uses_only_leaf_regs = 0;
5654 current_function_has_computed_jump = 0;
5655 current_function_is_thunk = 0;
5657 current_function_returns_pcc_struct = 0;
5658 current_function_returns_struct = 0;
5659 current_function_epilogue_delay_list = 0;
5660 current_function_uses_const_pool = 0;
5661 current_function_uses_pic_offset_table = 0;
5662 current_function_cannot_inline = 0;
5664 /* We have not yet needed to make a label to jump to for tail-recursion. */
5665 tail_recursion_label = 0;
5667 /* We haven't had a need to make a save area for ap yet. */
5668 arg_pointer_save_area = 0;
5670 /* No stack slots allocated yet. */
5671 frame_offset = 0;
5673 /* No SAVE_EXPRs in this function yet. */
5674 save_expr_regs = 0;
5676 /* No RTL_EXPRs in this function yet. */
5677 rtl_expr_chain = 0;
5679 /* Set up to allocate temporaries. */
5680 init_temp_slots ();
5682 /* Indicate that we need to distinguish between the return value of the
5683 present function and the return value of a function being called. */
5684 rtx_equal_function_value_matters = 1;
5686 /* Indicate that we have not instantiated virtual registers yet. */
5687 virtuals_instantiated = 0;
5689 /* Indicate we have no need of a frame pointer yet. */
5690 frame_pointer_needed = 0;
5692 /* By default assume not varargs or stdarg. */
5693 current_function_varargs = 0;
5694 current_function_stdarg = 0;
5696 /* We haven't made any trampolines for this function yet. */
5697 trampoline_list = 0;
5699 init_pending_stack_adjust ();
5700 inhibit_defer_pop = 0;
5702 current_function_outgoing_args_size = 0;
5704 if (init_lang_status)
5705 (*init_lang_status) (current_function);
5706 if (init_machine_status)
5707 (*init_machine_status) (current_function);
5710 /* Initialize the rtl expansion mechanism so that we can do simple things
5711 like generate sequences. This is used to provide a context during global
5712 initialization of some passes. */
5713 void
5714 init_dummy_function_start ()
5716 prepare_function_start ();
5719 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5720 and initialize static variables for generating RTL for the statements
5721 of the function. */
5723 void
5724 init_function_start (subr, filename, line)
5725 tree subr;
5726 char *filename;
5727 int line;
5729 prepare_function_start ();
5731 /* Remember this function for later. */
5732 current_function->next_global = all_functions;
5733 all_functions = current_function;
5735 current_function_name = (*decl_printable_name) (subr, 2);
5736 current_function->decl = subr;
5738 /* Nonzero if this is a nested function that uses a static chain. */
5740 current_function_needs_context
5741 = (decl_function_context (current_function_decl) != 0
5742 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5744 /* Within function body, compute a type's size as soon as it is laid out. */
5745 immediate_size_expand++;
5747 /* Prevent ever trying to delete the first instruction of a function.
5748 Also tell final how to output a linenum before the function prologue.
5749 Note linenums could be missing, e.g. when compiling a Java .class file. */
5750 if (line > 0)
5751 emit_line_note (filename, line);
5753 /* Make sure first insn is a note even if we don't want linenums.
5754 This makes sure the first insn will never be deleted.
5755 Also, final expects a note to appear there. */
5756 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5758 /* Set flags used by final.c. */
5759 if (aggregate_value_p (DECL_RESULT (subr)))
5761 #ifdef PCC_STATIC_STRUCT_RETURN
5762 current_function_returns_pcc_struct = 1;
5763 #endif
5764 current_function_returns_struct = 1;
5767 /* Warn if this value is an aggregate type,
5768 regardless of which calling convention we are using for it. */
5769 if (warn_aggregate_return
5770 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5771 warning ("function returns an aggregate");
5773 current_function_returns_pointer
5774 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5777 /* Make sure all values used by the optimization passes have sane
5778 defaults. */
5779 void
5780 init_function_for_compilation ()
5782 reg_renumber = 0;
5783 /* No prologue/epilogue insns yet. */
5784 prologue = epilogue = 0;
5787 /* Indicate that the current function uses extra args
5788 not explicitly mentioned in the argument list in any fashion. */
5790 void
5791 mark_varargs ()
5793 current_function_varargs = 1;
5796 /* Expand a call to __main at the beginning of a possible main function. */
5798 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5799 #undef HAS_INIT_SECTION
5800 #define HAS_INIT_SECTION
5801 #endif
5803 void
5804 expand_main_function ()
5806 #if !defined (HAS_INIT_SECTION)
5807 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5808 VOIDmode, 0);
5809 #endif /* not HAS_INIT_SECTION */
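/* Illustrative effect, for targets without an init section: the
   library call emitted above makes the user's main behave as if it
   began

       int main (int argc, char **argv)
       {
         __main ();     -- libgcc hook that runs global constructors
         ...
       }

   Targets with INIT_SECTION_ASM_OP arrange this from startup code
   instead, so no call is needed.  */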
5812 extern struct obstack permanent_obstack;
5814 /* Start the RTL for a new function, and set variables used for
5815 emitting RTL.
5816 SUBR is the FUNCTION_DECL node.
5817 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5818 the function's parameters, which must be run at any return statement. */
5820 void
5821 expand_function_start (subr, parms_have_cleanups)
5822 tree subr;
5823 int parms_have_cleanups;
5825 register int i;
5826 tree tem;
5827 rtx last_ptr = NULL_RTX;
5829 /* Make sure volatile mem refs aren't considered
5830 valid operands of arithmetic insns. */
5831 init_recog_no_volatile ();
5833 /* Set this before generating any memory accesses. */
5834 current_function_check_memory_usage
5835 = (flag_check_memory_usage
5836 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5838 current_function_instrument_entry_exit
5839 = (flag_instrument_function_entry_exit
5840 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5842 /* If function gets a static chain arg, store it in the stack frame.
5843 Do this first, so it gets the first stack slot offset. */
5844 if (current_function_needs_context)
5846 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5848 /* Delay copying static chain if it is not a register to avoid
5849 conflicts with regs used for parameters. */
5850 if (! SMALL_REGISTER_CLASSES
5851 || GET_CODE (static_chain_incoming_rtx) == REG)
5852 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5855 /* If the parameters of this function need cleaning up, get a label
5856 for the beginning of the code which executes those cleanups. This must
5857 be done before doing anything with return_label. */
5858 if (parms_have_cleanups)
5859 cleanup_label = gen_label_rtx ();
5860 else
5861 cleanup_label = 0;
5863 /* Make the label for return statements to jump to, if this machine
5864 does not have a one-instruction return and uses an epilogue,
5865 or if it returns a structure, or if it has parm cleanups. */
5866 #ifdef HAVE_return
5867 if (cleanup_label == 0 && HAVE_return
5868 && ! current_function_instrument_entry_exit
5869 && ! current_function_returns_pcc_struct
5870 && ! (current_function_returns_struct && ! optimize))
5871 return_label = 0;
5872 else
5873 return_label = gen_label_rtx ();
5874 #else
5875 return_label = gen_label_rtx ();
5876 #endif
5878 /* Initialize rtx used to return the value. */
5879 /* Do this before assign_parms so that we copy the struct value address
5880 before any library calls that assign parms might generate. */
5882 /* Decide whether to return the value in memory or in a register. */
5883 if (aggregate_value_p (DECL_RESULT (subr)))
5885 /* Returning something that won't go in a register. */
5886 register rtx value_address = 0;
5888 #ifdef PCC_STATIC_STRUCT_RETURN
5889 if (current_function_returns_pcc_struct)
5891 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5892 value_address = assemble_static_space (size);
5894 else
5895 #endif
5897 /* Expect to be passed the address of a place to store the value.
5898 If it is passed as an argument, assign_parms will take care of
5899 it. */
5900 if (struct_value_incoming_rtx)
5902 value_address = gen_reg_rtx (Pmode);
5903 emit_move_insn (value_address, struct_value_incoming_rtx);
5906 if (value_address)
5908 DECL_RTL (DECL_RESULT (subr))
5909 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5910 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5911 AGGREGATE_TYPE_P (TREE_TYPE
5912 (DECL_RESULT
5913 (subr))));
5916 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5917 /* If return mode is void, this decl rtl should not be used. */
5918 DECL_RTL (DECL_RESULT (subr)) = 0;
5919 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5921 /* If function will end with cleanup code for parms,
5922 compute the return values into a pseudo reg,
5923 which we will copy into the true return register
5924 after the cleanups are done. */
5926 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5928 #ifdef PROMOTE_FUNCTION_RETURN
5929 tree type = TREE_TYPE (DECL_RESULT (subr));
5930 int unsignedp = TREE_UNSIGNED (type);
5932 mode = promote_mode (type, mode, &unsignedp, 1);
5933 #endif
5935 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5937 else
5938 /* Scalar, returned in a register. */
5940 #ifdef FUNCTION_OUTGOING_VALUE
5941 DECL_RTL (DECL_RESULT (subr))
5942 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5943 #else
5944 DECL_RTL (DECL_RESULT (subr))
5945 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5946 #endif
5948 /* Mark this reg as the function's return value. */
5949 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5951 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5952 /* Needed because we may need to move this to memory
5953 in case it's a named return value whose address is taken. */
5954 DECL_REGISTER (DECL_RESULT (subr)) = 1;
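/* Summary of the return-value cases above (illustrative):

       struct S f (...)   aggregate: DECL_RTL is a MEM at the incoming
                          structure-value (or static PCC) address
       void f (...)       VOIDmode: DECL_RTL stays 0
       int f (...)        with parm cleanups or entry/exit
                          instrumentation: a pseudo, copied to the hard
                          return register by expand_function_end
       int f (...)        otherwise: the hard return register itself  */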
5958 /* Initialize rtx for parameters and local variables.
5959 In some cases this requires emitting insns. */
5961 assign_parms (subr);
5963 /* Copy the static chain now if it wasn't a register. The delay is to
5964 avoid conflicts with the parameter passing registers. */
5966 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5967 if (GET_CODE (static_chain_incoming_rtx) != REG)
5968 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5970 /* The following was moved from init_function_start.
5971 The move is supposed to make sdb output more accurate. */
5972 /* Indicate the beginning of the function body,
5973 as opposed to parm setup. */
5974 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5976 /* If doing stupid allocation, mark parms as born here. */
5978 if (GET_CODE (get_last_insn ()) != NOTE)
5979 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5980 parm_birth_insn = get_last_insn ();
5982 if (obey_regdecls)
5984 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5985 use_variable (regno_reg_rtx[i]);
5987 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5988 use_variable (current_function_internal_arg_pointer);
5991 context_display = 0;
5992 if (current_function_needs_context)
5994 /* Fetch static chain values for containing functions. */
5995 tem = decl_function_context (current_function_decl);
5996 /* If not doing stupid register allocation copy the static chain
5997 pointer into a pseudo. If we have small register classes, copy
5998 the value from memory if static_chain_incoming_rtx is a REG. If
5999 we do stupid register allocation, we use the stack address
6000 generated above. */
6001 if (tem && ! obey_regdecls)
6003 /* If the static chain originally came in a register, put it back
6004 there, then move it out in the next insn. The reason for
6005 this peculiar code is to satisfy function integration. */
6006 if (SMALL_REGISTER_CLASSES
6007 && GET_CODE (static_chain_incoming_rtx) == REG)
6008 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6009 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6012 while (tem)
6014 tree rtlexp = make_node (RTL_EXPR);
6016 RTL_EXPR_RTL (rtlexp) = last_ptr;
6017 context_display = tree_cons (tem, rtlexp, context_display);
6018 tem = decl_function_context (tem);
6019 if (tem == 0)
6020 break;
6021 /* Chain thru stack frames, assuming pointer to next lexical frame
6022 is found at the place we always store it. */
6023 #ifdef FRAME_GROWS_DOWNWARD
6024 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6025 #endif
6026 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6027 memory_address (Pmode,
6028 last_ptr)));
6030 /* If we are not optimizing, ensure that we know that this
6031 piece of context is live over the entire function. */
6032 if (! optimize)
6033 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6034 save_expr_regs);
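/* A sketch of the loop above, assuming the usual frame layout: for
   the current function f nested in g nested in h, last_ptr begins as
   f's incoming static chain, i.e. the address of g's frame.  Each
   iteration records the pair (containing function, chain rtx) in
   context_display, then loads the next outer chain from the fixed
   slot in that frame, so the display ends up holding entries for
   both g and h.  */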
6038 if (current_function_instrument_entry_exit)
6040 rtx fun = DECL_RTL (current_function_decl);
6041 if (GET_CODE (fun) == MEM)
6042 fun = XEXP (fun, 0);
6043 else
6044 abort ();
6045 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6046 fun, Pmode,
6047 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6049 hard_frame_pointer_rtx),
6050 Pmode);
6053 /* After the display initializations is where the tail-recursion label
6054 should go, if we end up needing one. Ensure we have a NOTE here
6055 since some things (like trampolines) get placed before this. */
6056 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6058 /* Evaluate now the sizes of any types declared among the arguments. */
6059 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6061 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6062 EXPAND_MEMORY_USE_BAD);
6063 /* Flush the queue in case this parameter declaration has
6064 side-effects. */
6065 emit_queue ();
6068 /* Make sure there is a line number after the function entry setup code. */
6069 force_next_line_note ();
6072 /* Undo the effects of init_dummy_function_start. */
6073 void
6074 expand_dummy_function_end ()
6076 /* End any sequences that failed to be closed due to syntax errors. */
6077 while (in_sequence_p ())
6078 end_sequence ();
6080 /* Outside function body, can't compute type's actual size
6081 until next function's body starts. */
6083 free_after_parsing (current_function);
6084 free_after_compilation (current_function);
6085 free (current_function);
6086 current_function = 0;
6089 /* Generate RTL for the end of the current function.
6090 FILENAME and LINE are the current position in the source file.
6092 It is up to language-specific callers to do cleanups for parameters--
6093 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6095 void
6096 expand_function_end (filename, line, end_bindings)
6097 char *filename;
6098 int line;
6099 int end_bindings;
6101 register int i;
6102 tree link;
6104 #ifdef TRAMPOLINE_TEMPLATE
6105 static rtx initial_trampoline;
6106 #endif
6108 finish_expr_for_function ();
6110 #ifdef NON_SAVING_SETJMP
6111 /* Don't put any variables in registers if we call setjmp
6112 on a machine that fails to restore the registers. */
6113 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6115 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6116 setjmp_protect (DECL_INITIAL (current_function_decl));
6118 setjmp_protect_args ();
6120 #endif
6122 /* Save the argument pointer if a save area was made for it. */
6123 if (arg_pointer_save_area)
6125 /* arg_pointer_save_area may not be a valid memory address, so we
6126 have to check it and fix it if necessary. */
6127 rtx seq;
6128 start_sequence ();
6129 emit_move_insn (validize_mem (arg_pointer_save_area),
6130 virtual_incoming_args_rtx);
6131 seq = gen_sequence ();
6132 end_sequence ();
6133 emit_insn_before (seq, tail_recursion_reentry);
6136 /* Initialize any trampolines required by this function. */
6137 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6139 tree function = TREE_PURPOSE (link);
6140 rtx context = lookup_static_chain (function);
6141 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6142 #ifdef TRAMPOLINE_TEMPLATE
6143 rtx blktramp;
6144 #endif
6145 rtx seq;
6147 #ifdef TRAMPOLINE_TEMPLATE
6148 /* First make sure this compilation has a template for
6149 initializing trampolines. */
6150 if (initial_trampoline == 0)
6152 end_temporary_allocation ();
6153 initial_trampoline
6154 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6155 resume_temporary_allocation ();
6157 ggc_add_rtx_root (&initial_trampoline, 1);
6159 #endif
6161 /* Generate insns to initialize the trampoline. */
6162 start_sequence ();
6163 tramp = round_trampoline_addr (XEXP (tramp, 0));
6164 #ifdef TRAMPOLINE_TEMPLATE
6165 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6166 emit_block_move (blktramp, initial_trampoline,
6167 GEN_INT (TRAMPOLINE_SIZE),
6168 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6169 #endif
6170 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6171 seq = get_insns ();
6172 end_sequence ();
6174 /* Put those insns at entry to the containing function (this one). */
6175 emit_insns_before (seq, tail_recursion_reentry);
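/* Background (an illustrative sketch; the real code is target
   specific, and call_it below is just a placeholder): a trampoline is
   a few instructions built on the stack.  Given GNU C such as

       int f (int x)
       {
         int g (int y) { return x + y; }
         return call_it (g);
       }

   taking g's address yields a pointer to stack code that loads the
   static chain register with f's frame and jumps to g's compiled
   body; the INITIALIZE_TRAMPOLINE above fills in exactly those two
   operands, the code address and CONTEXT.  */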
6178 /* If we are doing stack checking and this function makes calls,
6179 do a stack probe at the start of the function to ensure we have enough
6180 space for another stack frame. */
6181 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6183 rtx insn, seq;
6185 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6186 if (GET_CODE (insn) == CALL_INSN)
6188 start_sequence ();
6189 probe_stack_range (STACK_CHECK_PROTECT,
6190 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6191 seq = get_insns ();
6192 end_sequence ();
6193 emit_insns_before (seq, tail_recursion_reentry);
6194 break;
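/* Sketch of what the probe buys us (illustrative): probe_stack_range
   touches the region from STACK_CHECK_PROTECT to STACK_CHECK_PROTECT
   + STACK_CHECK_MAX_FRAME_SIZE bytes beyond the stack pointer, so
   running out of stack is caught here, at function entry, rather than
   inside some callee's prologue.  One probe covers the whole
   function, hence the break after the first CALL_INSN.  */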
6198 /* Warn about unused parms if extra warnings were specified. */
6199 if (warn_unused && extra_warnings)
6201 tree decl;
6203 for (decl = DECL_ARGUMENTS (current_function_decl);
6204 decl; decl = TREE_CHAIN (decl))
6205 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6206 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6207 warning_with_decl (decl, "unused parameter `%s'");
6210 /* Delete handlers for nonlocal gotos if nothing uses them. */
6211 if (nonlocal_goto_handler_slots != 0
6212 && ! current_function_has_nonlocal_label)
6213 delete_handlers ();
6215 /* End any sequences that failed to be closed due to syntax errors. */
6216 while (in_sequence_p ())
6217 end_sequence ();
6219 /* Outside function body, can't compute type's actual size
6220 until next function's body starts. */
6221 immediate_size_expand--;
6223 /* If doing stupid register allocation,
6224 mark register parms as dying here. */
6226 if (obey_regdecls)
6228 rtx tem;
6229 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6230 use_variable (regno_reg_rtx[i]);
6232 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6234 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6236 use_variable (XEXP (tem, 0));
6237 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6240 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6241 use_variable (current_function_internal_arg_pointer);
6244 clear_pending_stack_adjust ();
6245 do_pending_stack_adjust ();
6247 /* Mark the end of the function body.
6248 If control reaches this insn, the function can drop through
6249 without returning a value. */
6250 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6252 /* Must mark the last line number note in the function, so that the test
6253 coverage code can avoid counting the last line twice. This just tells
6254 the code to ignore the immediately following line note, since there
6255 already exists a copy of this note somewhere above. This line number
6256 note is still needed for debugging though, so we can't delete it. */
6257 if (flag_test_coverage)
6258 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6260 /* Output a linenumber for the end of the function.
6261 SDB depends on this. */
6262 emit_line_note_force (filename, line);
6264 /* Output the label for the actual return from the function,
6265 if one is expected. This happens either because a function epilogue
6266 is used instead of a return instruction, or because a return was done
6267 with a goto in order to run local cleanups, or because of pcc-style
6268 structure returning. */
6270 if (return_label)
6271 emit_label (return_label);
6273 /* C++ uses this. */
6274 if (end_bindings)
6275 expand_end_bindings (0, 0, 0);
6277 /* Now handle any leftover exception regions that may have been
6278 created for the parameters. */
6280 rtx last = get_last_insn ();
6281 rtx label;
6283 expand_leftover_cleanups ();
6285 /* If the above emitted any code, make sure we jump around it. */
6286 if (last != get_last_insn ())
6288 label = gen_label_rtx ();
6289 last = emit_jump_insn_after (gen_jump (label), last);
6290 last = emit_barrier_after (last);
6291 emit_label (label);
6295 if (current_function_instrument_entry_exit)
6297 rtx fun = DECL_RTL (current_function_decl);
6298 if (GET_CODE (fun) == MEM)
6299 fun = XEXP (fun, 0);
6300 else
6301 abort ();
6302 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6303 fun, Pmode,
6304 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6306 hard_frame_pointer_rtx),
6307 Pmode);
6310 /* If we had calls to alloca, and this machine needs
6311 an accurate stack pointer to exit the function,
6312 insert some code to save and restore the stack pointer. */
6313 #ifdef EXIT_IGNORE_STACK
6314 if (! EXIT_IGNORE_STACK)
6315 #endif
6316 if (current_function_calls_alloca)
6318 rtx tem = 0;
6320 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6321 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6324 /* If scalar return value was computed in a pseudo-reg,
6325 copy that to the hard return register. */
6326 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6327 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6328 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6329 >= FIRST_PSEUDO_REGISTER))
6331 rtx real_decl_result;
6333 #ifdef FUNCTION_OUTGOING_VALUE
6334 real_decl_result
6335 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6336 current_function_decl);
6337 #else
6338 real_decl_result
6339 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6340 current_function_decl);
6341 #endif
6342 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6343 /* If this is a BLKmode structure being returned in registers, then use
6344 the mode computed in expand_return. */
6345 if (GET_MODE (real_decl_result) == BLKmode)
6346 PUT_MODE (real_decl_result,
6347 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6348 emit_move_insn (real_decl_result,
6349 DECL_RTL (DECL_RESULT (current_function_decl)));
6350 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6352 /* The delay slot scheduler assumes that current_function_return_rtx
6353 holds the hard register containing the return value, not a temporary
6354 pseudo. */
6355 current_function_return_rtx = real_decl_result;
6358 /* If returning a structure, arrange to return the address of the value
6359 in a place where debuggers expect to find it.
6361 If returning a structure PCC style,
6362 the caller also depends on this value.
6363 And current_function_returns_pcc_struct is not necessarily set. */
6364 if (current_function_returns_struct
6365 || current_function_returns_pcc_struct)
6367 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6368 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6369 #ifdef FUNCTION_OUTGOING_VALUE
6370 rtx outgoing
6371 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6372 current_function_decl);
6373 #else
6374 rtx outgoing
6375 = FUNCTION_VALUE (build_pointer_type (type),
6376 current_function_decl);
6377 #endif
6379 /* Mark this as a function return value so integrate will delete the
6380 assignment and USE below when inlining this function. */
6381 REG_FUNCTION_VALUE_P (outgoing) = 1;
6383 emit_move_insn (outgoing, value_address);
6384 use_variable (outgoing);
6387 /* If this is an implementation of __throw, do what's necessary to
6388 communicate between __builtin_eh_return and the epilogue. */
6389 expand_eh_return ();
6391 /* Output a return insn if we are using one.
6392 Otherwise, let the rtl chain end here, to drop through
6393 into the epilogue. */
6395 #ifdef HAVE_return
6396 if (HAVE_return)
6398 emit_jump_insn (gen_return ());
6399 emit_barrier ();
6401 #endif
6403 /* Fix up any gotos that jumped out to the outermost
6404 binding level of the function.
6405 Must follow emitting RETURN_LABEL. */
6407 /* If you have any cleanups to do at this point,
6408 and they need to create temporary variables,
6409 then you will lose. */
6410 expand_fixups (get_insns ());
6413 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6414 or a single insn). */
6416 static int *
6417 record_insns (insns)
6418 rtx insns;
6420 int *vec;
6422 if (GET_CODE (insns) == SEQUENCE)
6424 int len = XVECLEN (insns, 0);
6425 vec = (int *) oballoc ((len + 1) * sizeof (int));
6426 vec[len] = 0;
6427 while (--len >= 0)
6428 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6430 else
6432 vec = (int *) oballoc (2 * sizeof (int));
6433 vec[0] = INSN_UID (insns);
6434 vec[1] = 0;
6436 return vec;
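/* Example (illustrative): for a SEQUENCE of three insns with UIDs 41,
   42 and 43 the result is the zero-terminated vector { 41, 42, 43, 0 };
   for a single insn with UID 7 it is { 7, 0 }.  contains below walks
   these vectors to test membership.  */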
6439 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6441 static int
6442 contains (insn, vec)
6443 rtx insn;
6444 int *vec;
6446 register int i, j;
6448 if (GET_CODE (insn) == INSN
6449 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6451 int count = 0;
6452 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6453 for (j = 0; vec[j]; j++)
6454 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6455 count++;
6456 return count;
6458 else
6460 for (j = 0; vec[j]; j++)
6461 if (INSN_UID (insn) == vec[j])
6462 return 1;
6464 return 0;
6467 int
6468 prologue_epilogue_contains (insn)
6469 rtx insn;
6471 if (prologue && contains (insn, prologue))
6472 return 1;
6473 if (epilogue && contains (insn, epilogue))
6474 return 1;
6475 return 0;
6478 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6479 this into place with notes indicating where the prologue ends and where
6480 the epilogue begins. Update the basic block information when possible. */
6482 void
6483 thread_prologue_and_epilogue_insns (f)
6484 rtx f ATTRIBUTE_UNUSED;
6486 int inserted = 0;
6488 #ifdef HAVE_prologue
6489 if (HAVE_prologue)
6491 rtx seq;
6493 start_sequence ();
6494 seq = gen_prologue ();
6495 emit_insn (seq);
6497 /* Retain a map of the prologue insns. */
6498 if (GET_CODE (seq) != SEQUENCE)
6499 seq = get_insns ();
6500 prologue = record_insns (seq);
6502 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6503 seq = gen_sequence ();
6504 end_sequence ();
6506 /* With optimization off, or for an empty function, the entry
6507 block may have no successors. */
6508 if (ENTRY_BLOCK_PTR->succ)
6510 /* Can't deal with multiple successors of the entry block. */
6511 if (ENTRY_BLOCK_PTR->succ->succ_next)
6512 abort ();
6514 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6515 inserted = 1;
6517 else
6518 emit_insn_after (seq, f);
6520 #endif
6522 #ifdef HAVE_epilogue
6523 if (HAVE_epilogue)
6525 edge e;
6526 basic_block bb = 0;
6527 rtx tail = get_last_insn ();
6529 /* ??? This is ghastly. If function returns were not done via uses,
6530 but via mark_regs_live_at_end, we could use insert_insn_on_edge
6531 and all of this ugliness would go away. */
6533 switch (optimize)
6535 default:
6536 /* If the exit block has no non-fake predecessors, we don't
6537 need an epilogue. Furthermore, only pay attention to the
6538 fallthru predecessors; if (conditional) return insns were
6539 generated, by definition we do not need to emit epilogue
6540 insns. */
6542 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6543 if ((e->flags & EDGE_FAKE) == 0
6544 && (e->flags & EDGE_FALLTHRU) != 0)
6545 break;
6546 if (e == NULL)
6547 break;
6549 /* We can't handle multiple epilogues -- if one is needed,
6550 we won't be able to place it multiple times.
6552 ??? Fix epilogue expanders to not assume they are the
6553 last thing done compiling the function. Either that
6554 or copy_rtx each insn.
6556 ??? Blah, it's not a simple expression to assert that
6557 we have exactly one fallthru exit edge. */
6559 bb = e->src;
6560 tail = bb->end;
6562 /* ??? If the last insn of the basic block is a jump, then we
6563 are creating a new basic block. Wimp out and leave these
6564 insns outside any block. */
6565 if (GET_CODE (tail) == JUMP_INSN)
6566 bb = 0;
6568 /* FALLTHRU */
6569 case 0:
6571 rtx prev, seq, first_use;
6573 /* Move the USE insns at the end of a function onto a list. */
6574 prev = tail;
6575 if (GET_CODE (prev) == BARRIER
6576 || GET_CODE (prev) == NOTE)
6577 prev = prev_nonnote_insn (prev);
6579 first_use = 0;
6580 if (prev
6581 && GET_CODE (prev) == INSN
6582 && GET_CODE (PATTERN (prev)) == USE)
6583 {
6584 /* If the end of the block is the use, grab hold of something
6585 else so that we emit barriers etc in the right place. */
6586 if (prev == tail)
6587 {
6588 do
6589 tail = PREV_INSN (tail);
6590 while (GET_CODE (tail) == INSN
6591 && GET_CODE (PATTERN (tail)) == USE);
6592 }
6594 do
6595 {
6596 rtx use = prev;
6597 prev = prev_nonnote_insn (prev);
6599 remove_insn (use);
6600 if (first_use)
6601 {
6602 NEXT_INSN (use) = first_use;
6603 PREV_INSN (first_use) = use;
6604 }
6605 else
6606 NEXT_INSN (use) = NULL_RTX;
6607 first_use = use;
6608 }
6609 while (prev
6610 && GET_CODE (prev) == INSN
6611 && GET_CODE (PATTERN (prev)) == USE);
6612 }
6614 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6615 epilogue insns, the USE insns at the end of a function,
6616 the jump insn that returns, and then a BARRIER. */
6618 if (GET_CODE (tail) != BARRIER)
6620 prev = next_nonnote_insn (tail);
6621 if (!prev || GET_CODE (prev) != BARRIER)
6622 emit_barrier_after (tail);
6625 seq = gen_epilogue ();
6626 prev = tail;
6627 tail = emit_jump_insn_after (seq, tail);
6629 /* Insert the USE insns immediately before the return insn, which
6630 must be the last instruction emitted in the sequence. */
6631 if (first_use)
6632 emit_insns_before (first_use, tail);
6633 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6635 /* Update the tail of the basic block. */
6636 if (bb)
6637 bb->end = tail;
6639 /* Retain a map of the epilogue insns. */
6640 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6644 #endif
6646 if (inserted)
6647 commit_edge_insertions ();
6650 /* Reposition the prologue-end and epilogue-begin notes after instruction
6651 scheduling and delayed branch scheduling. */
6653 void
6654 reposition_prologue_and_epilogue_notes (f)
6655 rtx f ATTRIBUTE_UNUSED;
6657 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6658 /* Reposition the prologue and epilogue notes. */
6659 if (n_basic_blocks)
6661 int len;
6663 if (prologue)
6665 register rtx insn, note = 0;
6667 /* Scan from the beginning until we reach the last prologue insn.
6668 We apparently can't depend on basic_block_{head,end} after
6669 reorg has run. */
6670 for (len = 0; prologue[len]; len++)
6671 ;
6672 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6674 if (GET_CODE (insn) == NOTE)
6676 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6677 note = insn;
6679 else if ((len -= contains (insn, prologue)) == 0)
6681 rtx next;
6682 /* Find the prologue-end note if we haven't already, and
6683 move it to just after the last prologue insn. */
6684 if (note == 0)
6686 for (note = insn; (note = NEXT_INSN (note));)
6687 if (GET_CODE (note) == NOTE
6688 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6689 break;
6692 next = NEXT_INSN (note);
6694 /* Whether or not we can depend on BLOCK_HEAD,
6695 attempt to keep it up-to-date. */
6696 if (BLOCK_HEAD (0) == note)
6697 BLOCK_HEAD (0) = next;
6699 remove_insn (note);
6700 add_insn_after (note, insn);
6705 if (epilogue)
6707 register rtx insn, note = 0;
6709 /* Scan from the end until we reach the first epilogue insn.
6710 We apparently can't depend on basic_block_{head,end} after
6711 reorg has run. */
6712 for (len = 0; epilogue[len]; len++)
6713 ;
6714 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6716 if (GET_CODE (insn) == NOTE)
6718 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6719 note = insn;
6721 else if ((len -= contains (insn, epilogue)) == 0)
6723 /* Find the epilogue-begin note if we haven't already, and
6724 move it to just before the first epilogue insn. */
6725 if (note == 0)
6727 for (note = insn; (note = PREV_INSN (note));)
6728 if (GET_CODE (note) == NOTE
6729 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6730 break;
6733 /* Whether or not we can depend on BLOCK_HEAD,
6734 attempt to keep it up-to-date. */
6735 if (n_basic_blocks
6736 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6737 BLOCK_HEAD (n_basic_blocks-1) = note;
6739 remove_insn (note);
6740 add_insn_before (note, insn);
6745 #endif /* HAVE_prologue or HAVE_epilogue */
6748 /* Mark T for GC. */
6750 static void
6751 mark_temp_slot (t)
6752 struct temp_slot *t;
6754 while (t)
6756 ggc_mark_rtx (t->slot);
6757 ggc_mark_rtx (t->address);
6758 ggc_mark_tree (t->rtl_expr);
6760 t = t->next;
6764 /* Mark P for GC. */
6766 static void
6767 mark_function_status (p)
6768 struct function *p;
6770 int i;
6771 rtx *r;
6773 if (p == 0)
6774 return;
6776 ggc_mark_rtx (p->arg_offset_rtx);
6778 if (p->x_parm_reg_stack_loc)
6779 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
6780 i > 0; --i, ++r)
6781 ggc_mark_rtx (*r);
6783 ggc_mark_rtx (p->return_rtx);
6784 ggc_mark_rtx (p->x_cleanup_label);
6785 ggc_mark_rtx (p->x_return_label);
6786 ggc_mark_rtx (p->x_save_expr_regs);
6787 ggc_mark_rtx (p->x_stack_slot_list);
6788 ggc_mark_rtx (p->x_parm_birth_insn);
6789 ggc_mark_rtx (p->x_tail_recursion_label);
6790 ggc_mark_rtx (p->x_tail_recursion_reentry);
6791 ggc_mark_rtx (p->internal_arg_pointer);
6792 ggc_mark_rtx (p->x_arg_pointer_save_area);
6793 ggc_mark_tree (p->x_rtl_expr_chain);
6794 ggc_mark_rtx (p->x_last_parm_insn);
6795 ggc_mark_tree (p->x_context_display);
6796 ggc_mark_tree (p->x_trampoline_list);
6797 ggc_mark_rtx (p->epilogue_delay_list);
6799 mark_temp_slot (p->x_temp_slots);
6802 struct var_refs_queue *q = p->fixup_var_refs_queue;
6803 while (q)
6805 ggc_mark_rtx (q->modified);
6806 q = q->next;
6810 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
6811 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
6812 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
6813 ggc_mark_tree (p->x_nonlocal_labels);
6816 /* Mark the function chain ARG (which is really a struct function **)
6817 for GC. */
6819 static void
6820 mark_function_chain (arg)
6821 void *arg;
6823 struct function *f = *(struct function **) arg;
6825 for (; f; f = f->next_global)
6827 ggc_mark_tree (f->decl);
6829 mark_function_status (f);
6830 mark_eh_status (f->eh);
6831 mark_stmt_status (f->stmt);
6832 mark_expr_status (f->expr);
6833 mark_emit_status (f->emit);
6834 mark_varasm_status (f->varasm);
6836 if (mark_machine_status)
6837 (*mark_machine_status) (f);
6838 if (mark_lang_status)
6839 (*mark_lang_status) (f);
6841 if (f->original_arg_vector)
6842 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
6843 if (f->original_decl_initial)
6844 ggc_mark_tree (f->original_decl_initial);
6848 /* Called once, at initialization, to initialize function.c. */
6850 void
6851 init_function_once ()
6853 ggc_add_root (&all_functions, 1, sizeof all_functions,
6854 mark_function_chain);