/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that is not greater than it.  Avoid using division in case the value
   is negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
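/* A worked example of the masking trick, for illustration only (assuming
   two's-complement arithmetic):

	CEIL_ROUND (37, 16)   == (37 + 15) & ~15 == 48
	FLOOR_ROUND (-37, 16) == -37 & ~15       == -48

   Integer division of a negative frame offset might round toward zero
   instead, which is why these macros mask rather than divide.  */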
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */
int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*init_machine_status) PARAMS ((struct function *));
void (*save_machine_status) PARAMS ((struct function *));
void (*restore_machine_status) PARAMS ((struct function *));
void (*mark_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));

/* Likewise, but for language-specific data.  */
void (*init_lang_status) PARAMS ((struct function *));
void (*save_lang_status) PARAMS ((struct function *));
void (*restore_lang_status) PARAMS ((struct function *));
void (*mark_lang_status) PARAMS ((struct function *));
void (*free_lang_status) PARAMS ((struct function *));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* Global list of all compiled functions.  */
struct function *all_functions = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static varray_type prologue;
static varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static varray_type sibcall_epilogue;
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  HOST_WIDE_INT alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};
/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
					       HOST_WIDE_INT, int, tree));
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
					enum machine_mode, enum machine_mode,
					int, unsigned int, int,
					struct hash_table *));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
					     enum machine_mode,
					     struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
				    struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
					  rtx, int, struct hash_table *));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
				      struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
			       tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
					  struct hash_table *));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#ifdef HAVE_epilogue
static void keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
							 struct hash_table *,
							 hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
static void mark_temp_slot PARAMS ((struct temp_slot *));
static void mark_function_status PARAMS ((struct function *));
static void mark_function_chain PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the save_lang_status callback to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p, *context_data;

  if (context)
    {
      context_data = (context == current_function_decl
		      ? cfun
		      : find_function_data (context));
      context_data->contains_functions = 1;
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->next = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  if (save_lang_status)
    (*save_lang_status) (p);
  if (save_machine_status)
    (*save_machine_status) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context ATTRIBUTE_UNUSED;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;
  struct var_refs_queue *next;

  cfun = p;
  outer_function_chain = p->next;

  current_function_decl = p->decl;
  reg_renumber = 0;

  restore_emit_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);
  if (restore_lang_status)
    (*restore_lang_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = next)
    {
      next = queue->next;
      fixup_var_refs (queue->modified, queue->promoted_mode,
		      queue->unsignedp, 0);
      free (queue);
    }
  p->fixup_var_refs_queue = 0;

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
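/* A sketch of the expected calling pattern, for illustration only (the
   actual callers live in the language front ends):

	push_function_context ();
	... expand the body of the nested function ...
	pop_function_context ();

   Each push must be balanced by exactly one pop; pop also drains the
   fixup_var_refs_queue recorded while the nested function was active.  */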
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  if (free_lang_status)
    (*free_lang_status) (f);
  free_stmt_status (f);
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  struct temp_slot *ts;
  struct temp_slot *next;

  free_eh_status (f);
  free_expr_status (f);
  free_emit_status (f);
  free_varasm_status (f);

  if (free_machine_status)
    (*free_machine_status) (f);

  if (f->x_parm_reg_stack_loc)
    free (f->x_parm_reg_stack_loc);

  for (ts = f->x_temp_slots; ts; ts = next)
    {
      next = ts->next;
      free (ts);
    }
  f->x_temp_slots = NULL;

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;
      else
	alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore any alignment request beyond what the preferred stack
     boundary can provide.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}
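/* For illustration only: a caller wanting a word of scratch stack space,
   aligned according to its mode, might write (hypothetical values)

	rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   The returned MEM addresses virtual_stack_vars_rtx until virtual
   registers are instantiated, and the frame pointer afterwards.  */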
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  HOST_WIDE_INT alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (! flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->alias_set = best_p->alias_set;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = alias_set;

  if (type != 0)
    MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));

  return p->slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  enum machine_mode mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
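/* For illustration only: to get addressable stack memory for an int
   whose lifetime follows the current statement, a caller might write

	rtx t = assign_temp (integer_type_node, 0, /*memory_required=*/1, 0);

   With memory_required == 0 and a non-BLKmode type, a pseudo register,
   possibly promoted to a wider mode, is returned instead.  */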
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high optimization levels.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      {
		prev_q->next = q->next;
		free (q);
	      }
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
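/* For example: two free BLKmode slots with (base_offset, full_size) of
   (0, 16) and (16, 16) are adjacent, so the loop above merges them into
   a single slot with (base_offset, full_size) == (0, 32), which a later
   assign_stack_temp_for_type may subdivide again.  */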
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}
/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (GET_CODE (new) == REG)
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
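/* For illustration only: if a temporary lives at
   (plus virtual_stack_vars_rtx (const_int 12)) and that address is later
   rewritten into a pseudo REG, calling this function with the old and
   new addresses records the REG as an alias, so that
   find_temp_slot_from_address still recognizes references made through
   it.  */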
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}
/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}
/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
	/* If this slot is below the current TEMP_SLOT_LEVEL, then it
	   needs to be preserved.  This can happen if a temporary in
	   the RTL_EXPR was addressed; preserve_temp_slots will move
	   the temporary into a higher level.  */
	if (temp_slot_level <= p->level)
	  p->in_use = 0;
	else
	  p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}
/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif
/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
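/* A sketch of the usual nesting discipline, for illustration only:

	push_temp_slots ();
	... allocate temporaries with assign_stack_temp and use them ...
	free_temp_slots ();
	pop_temp_slots ();

   Slots marked `keep' or still owned by a pending RTL_EXPR survive this
   and are released when their own level is popped or the RTL_EXPR is
   finally emitted.  */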
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
	       || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
			    decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.
	 We fixup references to the parts only after we fixup references
	 to the whole CONCAT, lest we do double fixups for the latter
	 references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = type_for_mode (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      set_mem_attributes (reg, decl, 1);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp)
	{
	  schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
				   promoted_mode, 0);
	  schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
	  schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
	}
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode,
		       3, XEXP (reg, 0), Pmode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}
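/* For example (illustrative only): given

	int i;
	int *p = &i;

   `i' may first have been expanded into a pseudo register; when the
   address-of is seen, put_var_into_stack gives `i' a stack home (or,
   with optimization, defers via gen_mem_addressof as above) so that its
   address can be taken.  */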
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     struct hash_table *ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
			   AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      MEM_ALIAS_SET (reg) = get_alias_set (type);
    }
  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     struct hash_table *ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
}
static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;
  rtx insn;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0, 0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_full_sequence (catch_clauses, catch_clauses_last);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses, 0, 0);
  end_full_sequence (&catch_clauses, &catch_clauses_last);

  /* Scan sequences saved in CALL_PLACEHOLDERS too.  */
  for (insn = first_insn; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int i;

	  /* Look at the Normal call, sibling call and tail recursion
	     sequences attached to the CALL_PLACEHOLDER.  */
	  for (i = 0; i < 3; i++)
	    {
	      rtx seq = XEXP (PATTERN (insn), i);
	      if (seq)
		{
		  push_to_sequence (seq);
		  fixup_var_refs_insns (var, promoted_mode, unsignedp,
					seq, 0, 0);
		  XEXP (PATTERN (insn), i) = get_insns ();
		  end_sequence ();
		}
	    }
	}
    }
}
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement, and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
      insn_list = XEXP (insn_list, 1);
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (INSN_P (insn))
	{
	  /* Remember the notes in case we delete the insn.  */
	  note = REG_NOTES (insn);

	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid
		     if it is not can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  struct fixup_replacement *next;

		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  next = replacements->next;
		  free (replacements);
		  replacements = next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  while (note)
	    {
	      if (GET_CODE (note) != INSN_LIST)
		XEXP (note, 0)
		  = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	      note = XEXP (note, 1);
	    }
	}

      if (!ht)
	insn = next;
      else if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = NULL_RTX;
    }
}
1861 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1862 See if the rtx expression at *LOC in INSN needs to be changed.
1864 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1865 contain a list of original rtx's and replacements. If we find that we need
1866 to modify this insn by replacing a memory reference with a pseudo or by
1867 making a new MEM to implement a SUBREG, we consult that list to see if
1868 we have already chosen a replacement. If none has already been allocated,
1869 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1870 or the SUBREG, as appropriate, to the pseudo. */
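/* As a hypothetical sketch (pseudo and slot numbers are invented):
   if VAR, formerly pseudo 101, is now

     (mem:SI (plus:SI (reg:SI fp) (const_int -8)))

   then an insn such as

     (set (reg:SI 102) (plus:SI (reg:SI 101) (reg:SI 101)))

   may no longer be recognizable with both operands in memory.  A
   single replacement pseudo is substituted for both occurrences,
   keeping any MATCH_DUP satisfied, and the caller then emits the
   load of VAR into that pseudo before the insn.  */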
1872 static void
1873 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1874 register rtx var;
1875 enum machine_mode promoted_mode;
1876 register rtx *loc;
1877 rtx insn;
1878 struct fixup_replacement **replacements;
1880 register int i;
1881 register rtx x = *loc;
1882 RTX_CODE code = GET_CODE (x);
1883 register const char *fmt;
1884 register rtx tem, tem1;
1885 struct fixup_replacement *replacement;
1887 switch (code)
1889 case ADDRESSOF:
1890 if (XEXP (x, 0) == var)
1892 /* Prevent sharing of rtl that might lose. */
1893 rtx sub = copy_rtx (XEXP (var, 0));
1895 if (! validate_change (insn, loc, sub, 0))
1897 rtx y = gen_reg_rtx (GET_MODE (sub));
1898 rtx seq, new_insn;
1900 /* We should be able to replace with a register or all is lost.
1901 Note that we can't use validate_change to verify this, since
1902 we are not trying to replace all dups simultaneously. */
1903 if (! validate_replace_rtx (*loc, y, insn))
1904 abort ();
1906 /* Careful! First try to recognize a direct move of the
1907 value, mimicking how things are done in gen_reload wrt
1908 PLUS. Consider what happens when insn is a conditional
1909 move instruction and addsi3 clobbers flags. */
1911 start_sequence ();
1912 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1913 seq = gen_sequence ();
1914 end_sequence ();
1916 if (recog_memoized (new_insn) < 0)
1918 /* That failed. Fall back on force_operand and hope. */
1920 start_sequence ();
1921 force_operand (sub, y);
1922 seq = gen_sequence ();
1923 end_sequence ();
1926 #ifdef HAVE_cc0
1927 /* Don't separate setter from user. */
1928 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1929 insn = PREV_INSN (insn);
1930 #endif
1932 emit_insn_before (seq, insn);
1935 return;
1937 case MEM:
1938 if (var == x)
1940 /* If we already have a replacement, use it. Otherwise,
1941 try to fix up this address in case it is invalid. */
1943 replacement = find_fixup_replacement (replacements, var);
1944 if (replacement->new)
1946 *loc = replacement->new;
1947 return;
1950 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1952 /* Unless we are forcing memory to register or we changed the mode,
1953 we can leave things the way they are if the insn is valid. */
1955 INSN_CODE (insn) = -1;
1956 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1957 && recog_memoized (insn) >= 0)
1958 return;
1960 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1961 return;
1964 /* If X contains VAR, we need to unshare it here so that we update
1965 each occurrence separately. But all identical MEMs in one insn
1966 must be replaced with the same rtx because of the possibility of
1967 MATCH_DUPs. */
1969 if (reg_mentioned_p (var, x))
1971 replacement = find_fixup_replacement (replacements, x);
1972 if (replacement->new == 0)
1973 replacement->new = copy_most_rtx (x, var);
1975 *loc = x = replacement->new;
1976 code = GET_CODE (x);
1978 break;
1980 case REG:
1981 case CC0:
1982 case PC:
1983 case CONST_INT:
1984 case CONST:
1985 case SYMBOL_REF:
1986 case LABEL_REF:
1987 case CONST_DOUBLE:
1988 return;
1990 case SIGN_EXTRACT:
1991 case ZERO_EXTRACT:
1992 /* Note that in some cases those types of expressions are altered
1993 by optimize_bit_field, and do not survive to get here. */
1994 if (XEXP (x, 0) == var
1995 || (GET_CODE (XEXP (x, 0)) == SUBREG
1996 && SUBREG_REG (XEXP (x, 0)) == var))
1998 /* Get TEM as a valid MEM in the mode presently in the insn.
2000 We don't worry about the possibility of MATCH_DUP here; it
2001 is highly unlikely and would be tricky to handle. */
2003 tem = XEXP (x, 0);
2004 if (GET_CODE (tem) == SUBREG)
2006 if (GET_MODE_BITSIZE (GET_MODE (tem))
2007 > GET_MODE_BITSIZE (GET_MODE (var)))
2009 replacement = find_fixup_replacement (replacements, var);
2010 if (replacement->new == 0)
2011 replacement->new = gen_reg_rtx (GET_MODE (var));
2012 SUBREG_REG (tem) = replacement->new;
2014 /* The following code works only if we have a MEM, so we
2015 need to handle the subreg here. We directly substitute
2016 it assuming that a subreg must be OK here. We already
2017 scheduled a replacement to copy the mem into the
2018 subreg. */
2019 XEXP (x, 0) = tem;
2020 return;
2022 else
2023 tem = fixup_memory_subreg (tem, insn, 0);
2025 else
2026 tem = fixup_stack_1 (tem, insn);
2028 /* Unless we want to load from memory, get TEM into the proper mode
2029 for an extract from memory. This can only be done if the
2030 extract is at a constant position and length. */
2032 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2033 && GET_CODE (XEXP (x, 2)) == CONST_INT
2034 && ! mode_dependent_address_p (XEXP (tem, 0))
2035 && ! MEM_VOLATILE_P (tem))
2037 enum machine_mode wanted_mode = VOIDmode;
2038 enum machine_mode is_mode = GET_MODE (tem);
2039 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2041 #ifdef HAVE_extzv
2042 if (GET_CODE (x) == ZERO_EXTRACT)
2044 wanted_mode
2045 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2046 if (wanted_mode == VOIDmode)
2047 wanted_mode = word_mode;
2049 #endif
2050 #ifdef HAVE_extv
2051 if (GET_CODE (x) == SIGN_EXTRACT)
2053 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2054 if (wanted_mode == VOIDmode)
2055 wanted_mode = word_mode;
2057 #endif
2058 /* If we have a narrower mode, we can do something. */
2059 if (wanted_mode != VOIDmode
2060 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2062 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2063 rtx old_pos = XEXP (x, 2);
2064 rtx newmem;
2066 /* If the bytes and bits are counted differently, we
2067 must adjust the offset. */
2068 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2069 offset = (GET_MODE_SIZE (is_mode)
2070 - GET_MODE_SIZE (wanted_mode) - offset);
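/* For instance, extracting a QImode field at bit position 24 from a
   SImode MEM gives offset = 24 / 8 = 3; on a hypothetical target
   where BYTES_BIG_ENDIAN and BITS_BIG_ENDIAN disagree, this becomes
   4 - 1 - 3 = 0, the byte at the other end of the word.  */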
2072 pos %= GET_MODE_BITSIZE (wanted_mode);
2074 newmem = gen_rtx_MEM (wanted_mode,
2075 plus_constant (XEXP (tem, 0), offset));
2076 MEM_COPY_ATTRIBUTES (newmem, tem);
2078 /* Make the change and see if the insn remains valid. */
2079 INSN_CODE (insn) = -1;
2080 XEXP (x, 0) = newmem;
2081 XEXP (x, 2) = GEN_INT (pos);
2083 if (recog_memoized (insn) >= 0)
2084 return;
2086 /* Otherwise, restore old position. XEXP (x, 0) will be
2087 restored later. */
2088 XEXP (x, 2) = old_pos;
2092 /* If we get here, the bitfield extract insn can't accept a memory
2093 reference. Copy the input into a register. */
2095 tem1 = gen_reg_rtx (GET_MODE (tem));
2096 emit_insn_before (gen_move_insn (tem1, tem), insn);
2097 XEXP (x, 0) = tem1;
2098 return;
2100 break;
2102 case SUBREG:
2103 if (SUBREG_REG (x) == var)
2105 /* If this is a special SUBREG made because VAR was promoted
2106 from a wider mode, replace it with VAR and call ourselves
2107 recursively, this time saying that the object previously
2108 had its current mode (by virtue of the SUBREG). */
2110 if (SUBREG_PROMOTED_VAR_P (x))
2112 *loc = var;
2113 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2114 return;
2117 /* If this SUBREG makes VAR wider, it has become a paradoxical
2118 SUBREG with VAR in memory, but these aren't allowed at this
2119 stage of the compilation. So load VAR into a pseudo and take
2120 a SUBREG of that pseudo. */
2121 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2123 replacement = find_fixup_replacement (replacements, var);
2124 if (replacement->new == 0)
2125 replacement->new = gen_reg_rtx (GET_MODE (var));
2126 SUBREG_REG (x) = replacement->new;
2127 return;
2130 /* See if we have already found a replacement for this SUBREG.
2131 If so, use it. Otherwise, make a MEM and see if the insn
2132 is recognized. If not, or if we should force MEM into a register,
2133 make a pseudo for this SUBREG. */
2134 replacement = find_fixup_replacement (replacements, x);
2135 if (replacement->new)
2137 *loc = replacement->new;
2138 return;
2141 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2143 INSN_CODE (insn) = -1;
2144 if (! flag_force_mem && recog_memoized (insn) >= 0)
2145 return;
2147 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2148 return;
2150 break;
2152 case SET:
2153 /* First do special simplification of bit-field references. */
2154 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2155 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2156 optimize_bit_field (x, insn, 0);
2157 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2158 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2159 optimize_bit_field (x, insn, NULL_PTR);
2161 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2162 into a register and then store it back out. */
2163 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2164 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2165 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2166 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2167 > GET_MODE_SIZE (GET_MODE (var))))
2169 replacement = find_fixup_replacement (replacements, var);
2170 if (replacement->new == 0)
2171 replacement->new = gen_reg_rtx (GET_MODE (var));
2173 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2174 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2177 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2178 insn into a pseudo and store the low part of the pseudo into VAR. */
2179 if (GET_CODE (SET_DEST (x)) == SUBREG
2180 && SUBREG_REG (SET_DEST (x)) == var
2181 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2182 > GET_MODE_SIZE (GET_MODE (var))))
2184 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2185 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2186 tem)),
2187 insn);
2188 break;
2192 rtx dest = SET_DEST (x);
2193 rtx src = SET_SRC (x);
2194 #ifdef HAVE_insv
2195 rtx outerdest = dest;
2196 #endif
2198 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2199 || GET_CODE (dest) == SIGN_EXTRACT
2200 || GET_CODE (dest) == ZERO_EXTRACT)
2201 dest = XEXP (dest, 0);
2203 if (GET_CODE (src) == SUBREG)
2204 src = XEXP (src, 0);
2206 /* If VAR does not appear at the top level of the SET
2207 just scan the lower levels of the tree. */
2209 if (src != var && dest != var)
2210 break;
2212 /* We will need to rerecognize this insn. */
2213 INSN_CODE (insn) = -1;
2215 #ifdef HAVE_insv
2216 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2218 /* Since this case will return, ensure we fixup all the
2219 operands here. */
2220 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2221 insn, replacements);
2222 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2223 insn, replacements);
2224 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2225 insn, replacements);
2227 tem = XEXP (outerdest, 0);
2229 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2230 that may appear inside a ZERO_EXTRACT.
2231 This was legitimate when the MEM was a REG. */
2232 if (GET_CODE (tem) == SUBREG
2233 && SUBREG_REG (tem) == var)
2234 tem = fixup_memory_subreg (tem, insn, 0);
2235 else
2236 tem = fixup_stack_1 (tem, insn);
2238 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2239 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2240 && ! mode_dependent_address_p (XEXP (tem, 0))
2241 && ! MEM_VOLATILE_P (tem))
2243 enum machine_mode wanted_mode;
2244 enum machine_mode is_mode = GET_MODE (tem);
2245 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2247 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2248 if (wanted_mode == VOIDmode)
2249 wanted_mode = word_mode;
2251 /* If we have a narrower mode, we can do something. */
2252 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2254 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2255 rtx old_pos = XEXP (outerdest, 2);
2256 rtx newmem;
2258 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2259 offset = (GET_MODE_SIZE (is_mode)
2260 - GET_MODE_SIZE (wanted_mode) - offset);
2262 pos %= GET_MODE_BITSIZE (wanted_mode);
2264 newmem = gen_rtx_MEM (wanted_mode,
2265 plus_constant (XEXP (tem, 0),
2266 offset));
2267 MEM_COPY_ATTRIBUTES (newmem, tem);
2269 /* Make the change and see if the insn remains valid. */
2270 INSN_CODE (insn) = -1;
2271 XEXP (outerdest, 0) = newmem;
2272 XEXP (outerdest, 2) = GEN_INT (pos);
2274 if (recog_memoized (insn) >= 0)
2275 return;
2277 /* Otherwise, restore old position. XEXP (x, 0) will be
2278 restored later. */
2279 XEXP (outerdest, 2) = old_pos;
2283 /* If we get here, the bit-field store doesn't allow memory
2284 or isn't located at a constant position. Load the value into
2285 a register, do the store, and put it back into memory. */
2287 tem1 = gen_reg_rtx (GET_MODE (tem));
2288 emit_insn_before (gen_move_insn (tem1, tem), insn);
2289 emit_insn_after (gen_move_insn (tem, tem1), insn);
2290 XEXP (outerdest, 0) = tem1;
2291 return;
2293 #endif
2295 /* STRICT_LOW_PART is a no-op on memory references
2296 and it can cause combinations to be unrecognizable,
2297 so eliminate it. */
2299 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2300 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2302 /* A valid insn to copy VAR into or out of a register
2303 must be left alone, to avoid an infinite loop here.
2304 If the reference to VAR is by a subreg, fix that up,
2305 since SUBREG is not valid for a memref.
2306 Also fix up the address of the stack slot.
2308 Note that we must not try to recognize the insn until
2309 after we know that we have valid addresses and no
2310 (subreg (mem ...) ...) constructs, since these interfere
2311 with determining the validity of the insn. */
2313 if ((SET_SRC (x) == var
2314 || (GET_CODE (SET_SRC (x)) == SUBREG
2315 && SUBREG_REG (SET_SRC (x)) == var))
2316 && (GET_CODE (SET_DEST (x)) == REG
2317 || (GET_CODE (SET_DEST (x)) == SUBREG
2318 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2319 && GET_MODE (var) == promoted_mode
2320 && x == single_set (insn))
2322 rtx pat, last;
2324 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2325 if (replacement->new)
2326 SET_SRC (x) = replacement->new;
2327 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2328 SET_SRC (x) = replacement->new
2329 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2330 else
2331 SET_SRC (x) = replacement->new
2332 = fixup_stack_1 (SET_SRC (x), insn);
2334 if (recog_memoized (insn) >= 0)
2335 return;
2337 /* INSN is not valid, but we know that we want to
2338 copy SET_SRC (x) to SET_DEST (x) in some way. So
2339 we generate the move and see whether it requires more
2340 than one insn. If it does, we emit those insns and
2341 delete INSN. Otherwise, we can just replace the pattern
2342 of INSN; we have already verified above that INSN has
2343 no function other than to do X. */
2345 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2346 if (GET_CODE (pat) == SEQUENCE)
2348 last = emit_insn_before (pat, insn);
2350 /* INSN might have REG_RETVAL or other important notes, so
2351 we need to store the pattern of the last insn in the
2352 sequence into INSN similarly to the normal case. LAST
2353 should not have REG_NOTES, but we allow them if INSN has
2354 no REG_NOTES. */
2355 if (REG_NOTES (last) && REG_NOTES (insn))
2356 abort ();
2357 if (REG_NOTES (last))
2358 REG_NOTES (insn) = REG_NOTES (last);
2359 PATTERN (insn) = PATTERN (last);
2361 PUT_CODE (last, NOTE);
2362 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2363 NOTE_SOURCE_FILE (last) = 0;
2365 else
2366 PATTERN (insn) = pat;
2368 return;
2371 if ((SET_DEST (x) == var
2372 || (GET_CODE (SET_DEST (x)) == SUBREG
2373 && SUBREG_REG (SET_DEST (x)) == var))
2374 && (GET_CODE (SET_SRC (x)) == REG
2375 || (GET_CODE (SET_SRC (x)) == SUBREG
2376 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2377 && GET_MODE (var) == promoted_mode
2378 && x == single_set (insn))
2380 rtx pat, last;
2382 if (GET_CODE (SET_DEST (x)) == SUBREG)
2383 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2384 else
2385 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2387 if (recog_memoized (insn) >= 0)
2388 return;
2390 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2391 if (GET_CODE (pat) == SEQUENCE)
2393 last = emit_insn_before (pat, insn);
2395 /* INSN might have REG_RETVAL or other important notes, so
2396 we need to store the pattern of the last insn in the
2397 sequence into INSN similarly to the normal case. LAST
2398 should not have REG_NOTES, but we allow them if INSN has
2399 no REG_NOTES. */
2400 if (REG_NOTES (last) && REG_NOTES (insn))
2401 abort ();
2402 if (REG_NOTES (last))
2403 REG_NOTES (insn) = REG_NOTES (last);
2404 PATTERN (insn) = PATTERN (last);
2406 PUT_CODE (last, NOTE);
2407 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2408 NOTE_SOURCE_FILE (last) = 0;
2410 else
2411 PATTERN (insn) = pat;
2413 return;
2416 /* Otherwise, storing into VAR must be handled specially
2417 by storing into a temporary and copying that into VAR
2418 with a new insn after this one. Note that this case
2419 will be used when storing into a promoted scalar since
2420 the insn will now have different modes on the input
2421 and output and hence will be invalid (except for the case
2422 of setting it to a constant, which does not need any
2423 change if it is valid). We generate extra code in that case,
2424 but combine.c will eliminate it. */
2426 if (dest == var)
2428 rtx temp;
2429 rtx fixeddest = SET_DEST (x);
2431 /* A STRICT_LOW_PART around a MEM can be discarded. */
2432 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2433 fixeddest = XEXP (fixeddest, 0);
2434 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2435 if (GET_CODE (fixeddest) == SUBREG)
2437 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2438 promoted_mode = GET_MODE (fixeddest);
2440 else
2441 fixeddest = fixup_stack_1 (fixeddest, insn);
2443 temp = gen_reg_rtx (promoted_mode);
2445 emit_insn_after (gen_move_insn (fixeddest,
2446 gen_lowpart (GET_MODE (fixeddest),
2447 temp)),
2448 insn);
2450 SET_DEST (x) = temp;
2454 default:
2455 break;
2458 /* Nothing special about this RTX; fix its operands. */
2460 fmt = GET_RTX_FORMAT (code);
2461 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2463 if (fmt[i] == 'e')
2464 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2465 else if (fmt[i] == 'E')
2467 register int j;
2468 for (j = 0; j < XVECLEN (x, i); j++)
2469 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2470 insn, replacements);
2475 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2476 return an rtx (MEM:m1 newaddr) which is equivalent.
2477 If any insns must be emitted to compute NEWADDR, put them before INSN.
2479 UNCRITICAL nonzero means accept paradoxical subregs.
2480 This is used for subregs found inside REG_NOTES. */
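/* A sketch of the transformation, assuming 4-byte words:

     (subreg:QI (mem:SI (reg:SI 100)) 0)
   becomes
     (mem:QI (reg:SI 100))

   on a little-endian target; on a big-endian target the address is
   instead offset by 3 so that the result still names the same byte.  */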
2482 static rtx
2483 fixup_memory_subreg (x, insn, uncritical)
2484 rtx x;
2485 rtx insn;
2486 int uncritical;
2488 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2489 rtx addr = XEXP (SUBREG_REG (x), 0);
2490 enum machine_mode mode = GET_MODE (x);
2491 rtx result;
2493 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2494 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2495 && ! uncritical)
2496 abort ();
2498 if (BYTES_BIG_ENDIAN)
2499 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2500 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2501 addr = plus_constant (addr, offset);
2502 if (!flag_force_addr && memory_address_p (mode, addr))
2503 /* Shortcut if no insns need be emitted. */
2504 return change_address (SUBREG_REG (x), mode, addr);
2505 start_sequence ();
2506 result = change_address (SUBREG_REG (x), mode, addr);
2507 emit_insn_before (gen_sequence (), insn);
2508 end_sequence ();
2509 return result;
2512 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2513 Replace subexpressions of X in place.
2514 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2515 Otherwise return X, with its contents possibly altered.
2517 If any insns must be emitted to compute NEWADDR, put them before INSN.
2519 UNCRITICAL is as in fixup_memory_subreg. */
2521 static rtx
2522 walk_fixup_memory_subreg (x, insn, uncritical)
2523 register rtx x;
2524 rtx insn;
2525 int uncritical;
2527 register enum rtx_code code;
2528 register const char *fmt;
2529 register int i;
2531 if (x == 0)
2532 return 0;
2534 code = GET_CODE (x);
2536 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2537 return fixup_memory_subreg (x, insn, uncritical);
2539 /* Nothing special about this RTX; fix its operands. */
2541 fmt = GET_RTX_FORMAT (code);
2542 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2544 if (fmt[i] == 'e')
2545 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2546 else if (fmt[i] == 'E')
2548 register int j;
2549 for (j = 0; j < XVECLEN (x, i); j++)
2550 XVECEXP (x, i, j)
2551 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2554 return x;
2557 /* For each memory ref within X, if it refers to a stack slot
2558 with an out of range displacement, put the address in a temp register
2559 (emitting new insns before INSN to load these registers)
2560 and alter the memory ref to use that register.
2561 Replace each such MEM rtx with a copy, to avoid clobberage. */
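/* For example, if the displacement is out of range for the machine
   (the constant below is purely illustrative):

     (mem:SI (plus:SI (reg:SI fp) (const_int 100000)))

   then insns computing

     (set (reg:SI 101) (plus:SI (reg:SI fp) (const_int 100000)))

   are emitted before INSN and the reference is rewritten as
   (mem:SI (reg:SI 101)).  */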
2563 static rtx
2564 fixup_stack_1 (x, insn)
2565 rtx x;
2566 rtx insn;
2568 register int i;
2569 register RTX_CODE code = GET_CODE (x);
2570 register const char *fmt;
2572 if (code == MEM)
2574 register rtx ad = XEXP (x, 0);
2575 /* If we have address of a stack slot but it's not valid
2576 (displacement is too large), compute the sum in a register. */
2577 if (GET_CODE (ad) == PLUS
2578 && GET_CODE (XEXP (ad, 0)) == REG
2579 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2580 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2581 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2582 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2583 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2584 #endif
2585 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2586 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2587 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2588 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2590 rtx temp, seq;
2591 if (memory_address_p (GET_MODE (x), ad))
2592 return x;
2594 start_sequence ();
2595 temp = copy_to_reg (ad);
2596 seq = gen_sequence ();
2597 end_sequence ();
2598 emit_insn_before (seq, insn);
2599 return change_address (x, VOIDmode, temp);
2601 return x;
2604 fmt = GET_RTX_FORMAT (code);
2605 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2607 if (fmt[i] == 'e')
2608 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2609 else if (fmt[i] == 'E')
2611 register int j;
2612 for (j = 0; j < XVECLEN (x, i); j++)
2613 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2616 return x;
2619 /* Optimization: a bit-field instruction whose field
2620 happens to be a byte or halfword in memory
2621 can be changed to a move instruction.
2623 We call here when INSN is an insn to examine or store into a bit-field.
2624 BODY is the SET-rtx to be altered.
2626 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2627 (Currently this is called only from function.c, and EQUIV_MEM
2628 is always 0.) */
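/* As a sketch: a field of width 8 at bit position 16 within a word
   in memory passes the tests below (QImode exists and 16 % 8 == 0),
   so

     (set (reg:SI 100) (zero_extract:SI (mem:SI addr)
                                        (const_int 8) (const_int 16)))

   can be rewritten as a QImode load from addr plus the appropriate
   byte offset, followed by any needed conversion.  */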
2630 static void
2631 optimize_bit_field (body, insn, equiv_mem)
2632 rtx body;
2633 rtx insn;
2634 rtx *equiv_mem;
2636 register rtx bitfield;
2637 int destflag;
2638 rtx seq = 0;
2639 enum machine_mode mode;
2641 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2642 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2643 bitfield = SET_DEST (body), destflag = 1;
2644 else
2645 bitfield = SET_SRC (body), destflag = 0;
2647 /* First check that the field being stored has constant size and position
2648 and is in fact a byte or halfword suitably aligned. */
2650 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2651 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2652 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2653 != BLKmode)
2654 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2656 register rtx memref = 0;
2658 /* Now check that the containing word is memory, not a register,
2659 and that it is safe to change the machine mode. */
2661 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2662 memref = XEXP (bitfield, 0);
2663 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2664 && equiv_mem != 0)
2665 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2666 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2667 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2668 memref = SUBREG_REG (XEXP (bitfield, 0));
2669 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2670 && equiv_mem != 0
2671 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2672 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2674 if (memref
2675 && ! mode_dependent_address_p (XEXP (memref, 0))
2676 && ! MEM_VOLATILE_P (memref))
2678 /* Now adjust the address, first for any subreg'ing
2679 that we are now getting rid of,
2680 and then for which byte of the word is wanted. */
2682 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2683 rtx insns;
2685 /* Adjust OFFSET to count bits from low-address byte. */
2686 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2687 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2688 - offset - INTVAL (XEXP (bitfield, 1)));
2690 /* Adjust OFFSET to count bytes from low-address byte. */
2691 offset /= BITS_PER_UNIT;
2692 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2694 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2695 if (BYTES_BIG_ENDIAN)
2696 offset -= (MIN (UNITS_PER_WORD,
2697 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2698 - MIN (UNITS_PER_WORD,
2699 GET_MODE_SIZE (GET_MODE (memref))));
2702 start_sequence ();
2703 memref = change_address (memref, mode,
2704 plus_constant (XEXP (memref, 0), offset));
2705 insns = get_insns ();
2706 end_sequence ();
2707 emit_insns_before (insns, insn);
2709 /* Store this memory reference where
2710 we found the bit field reference. */
2712 if (destflag)
2714 validate_change (insn, &SET_DEST (body), memref, 1);
2715 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2717 rtx src = SET_SRC (body);
2718 while (GET_CODE (src) == SUBREG
2719 && SUBREG_WORD (src) == 0)
2720 src = SUBREG_REG (src);
2721 if (GET_MODE (src) != GET_MODE (memref))
2722 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2723 validate_change (insn, &SET_SRC (body), src, 1);
2725 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2726 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2727 /* This shouldn't happen because anything that didn't have
2728 one of these modes should have been converted explicitly
2729 and then referenced through a subreg.
2730 This is so because the original bit-field was
2731 handled by agg_mode and so its tree structure had
2732 the same mode that memref now has. */
2733 abort ();
2735 else
2737 rtx dest = SET_DEST (body);
2739 while (GET_CODE (dest) == SUBREG
2740 && SUBREG_WORD (dest) == 0
2741 && (GET_MODE_CLASS (GET_MODE (dest))
2742 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2743 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2744 <= UNITS_PER_WORD))
2745 dest = SUBREG_REG (dest);
2747 validate_change (insn, &SET_DEST (body), dest, 1);
2749 if (GET_MODE (dest) == GET_MODE (memref))
2750 validate_change (insn, &SET_SRC (body), memref, 1);
2751 else
2753 /* Convert the mem ref to the destination mode. */
2754 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2756 start_sequence ();
2757 convert_move (newreg, memref,
2758 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2759 seq = get_insns ();
2760 end_sequence ();
2762 validate_change (insn, &SET_SRC (body), newreg, 1);
2766 /* See if we can convert this extraction or insertion into
2767 a simple move insn. We might not be able to do so if this
2768 was, for example, part of a PARALLEL.
2770 If we succeed, write out any needed conversions. If we fail,
2771 it is hard to guess why we failed, so don't do anything
2772 special; just let the optimization be suppressed. */
2774 if (apply_change_group () && seq)
2775 emit_insns_before (seq, insn);
2780 /* These routines are responsible for converting virtual register references
2781 to the actual hard register references once RTL generation is complete.
2783 The following four variables are used for communication between the
2784 routines. They contain the offsets of the virtual registers from their
2785 respective hard registers. */
2787 static int in_arg_offset;
2788 static int var_offset;
2789 static int dynamic_offset;
2790 static int out_arg_offset;
2791 static int cfa_offset;
2793 /* On most machines, the stack pointer register is equivalent to the bottom
2794 of the stack. */
2796 #ifndef STACK_POINTER_OFFSET
2797 #define STACK_POINTER_OFFSET 0
2798 #endif
2800 /* If not defined, pick an appropriate default for the offset of dynamically
2801 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2802 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2804 #ifndef STACK_DYNAMIC_OFFSET
2806 /* The bottom of the stack points to the actual arguments. If
2807 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2808 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2809 stack space for register parameters is not pushed by the caller, but
2810 rather part of the fixed stack areas and hence not included in
2811 `current_function_outgoing_args_size'. Nevertheless, we must allow
2812 for it when allocating stack dynamic objects. */
2814 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2815 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2816 ((ACCUMULATE_OUTGOING_ARGS \
2817 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2818 + (STACK_POINTER_OFFSET))
2820 #else
2821 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2822 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2823 + (STACK_POINTER_OFFSET))
2824 #endif
2825 #endif
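/* As a worked example (all numbers hypothetical): with
   ACCUMULATE_OUTGOING_ARGS, no REG_PARM_STACK_SPACE,
   current_function_outgoing_args_size == 16 and
   STACK_POINTER_OFFSET == 0, dynamically allocated objects would
   start 16 bytes above the stack pointer, past the outgoing
   argument area.  */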
2827 /* On most machines, the CFA coincides with the first incoming parm. */
2829 #ifndef ARG_POINTER_CFA_OFFSET
2830 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2831 #endif
2833 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2834 its address taken. DECL is the decl for the object stored in the
2835 register, for later use if we do need to force REG into the stack.
2836 REG is overwritten by the MEM like in put_reg_into_stack. */
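/* A sketch of the result, assuming Pmode is SImode and REG was
   (reg:SI 100) (the pseudo numbers are invented): afterwards REG is

     (mem:SI (addressof:SI (reg:SI 101) 100 decl))

   where pseudo 101 will hold the address if the register must later
   be forced into the stack.  */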
2838 rtx
2839 gen_mem_addressof (reg, decl)
2840 rtx reg;
2841 tree decl;
2843 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2844 REGNO (reg), decl);
2846 /* If the original REG was a user-variable, then so is the REG whose
2847 address is being taken. Likewise for unchanging. */
2848 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2849 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2851 PUT_CODE (reg, MEM);
2852 XEXP (reg, 0) = r;
2853 if (decl)
2855 tree type = TREE_TYPE (decl);
2857 PUT_MODE (reg, DECL_MODE (decl));
2858 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2859 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2860 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2862 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2863 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2865 else
2867 /* We have no alias information about this newly created MEM. */
2868 MEM_ALIAS_SET (reg) = 0;
2870 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2873 return reg;
2876 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2878 void
2879 flush_addressof (decl)
2880 tree decl;
2882 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2883 && DECL_RTL (decl) != 0
2884 && GET_CODE (DECL_RTL (decl)) == MEM
2885 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2886 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2887 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2890 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2892 static void
2893 put_addressof_into_stack (r, ht)
2894 rtx r;
2895 struct hash_table *ht;
2897 tree decl, type;
2898 int volatile_p, used_p;
2900 rtx reg = XEXP (r, 0);
2902 if (GET_CODE (reg) != REG)
2903 abort ();
2905 decl = ADDRESSOF_DECL (r);
2906 if (decl)
2908 type = TREE_TYPE (decl);
2909 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2910 && TREE_THIS_VOLATILE (decl));
2911 used_p = (TREE_USED (decl)
2912 || (TREE_CODE (decl) != SAVE_EXPR
2913 && DECL_INITIAL (decl) != 0));
2915 else
2917 type = NULL_TREE;
2918 volatile_p = 0;
2919 used_p = 1;
2922 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2923 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2926 /* List of replacements made below in purge_addressof_1 when creating
2927 bitfield insertions. */
2928 static rtx purge_bitfield_addressof_replacements;
2930 /* List of replacements made below in purge_addressof_1 for patterns
2931 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2932 corresponding (ADDRESSOF (REG ...)) and the value is a substitution
2933 for the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS
2934 is not enough in complex cases, e.g. when some field values can be
2935 extracted by using a MEM with a narrower mode. */
2936 static rtx purge_addressof_replacements;
2938 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2939 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2940 the stack. If the function returns FALSE then the replacement could not
2941 be made. */
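/* For instance (register numbers invented), a reference such as

     (mem:SI (addressof:SI (reg:SI 100) 100 decl))

   can usually be collapsed back to plain (reg:SI 100) when the modes
   agree; when they do not, the code below extracts or inserts the
   relevant bits instead, and failing that the register is put into
   the stack.  */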
2943 static boolean
2944 purge_addressof_1 (loc, insn, force, store, ht)
2945 rtx *loc;
2946 rtx insn;
2947 int force, store;
2948 struct hash_table *ht;
2950 rtx x;
2951 RTX_CODE code;
2952 int i, j;
2953 const char *fmt;
2954 boolean result = true;
2956 /* Re-start here to avoid recursion in common cases. */
2957 restart:
2959 x = *loc;
2960 if (x == 0)
2961 return true;
2963 code = GET_CODE (x);
2965 /* If we don't return in any of the cases below, we will recurse inside
2966 the RTX, which will normally result in any ADDRESSOF being forced into
2967 memory. */
2968 if (code == SET)
2970 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2971 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2972 return result;
2975 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2977 /* We must create a copy of the rtx because it was created by
2978 overwriting a REG rtx which is always shared. */
2979 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2980 rtx insns;
2982 if (validate_change (insn, loc, sub, 0)
2983 || validate_replace_rtx (x, sub, insn))
2984 return true;
2986 start_sequence ();
2987 sub = force_operand (sub, NULL_RTX);
2988 if (! validate_change (insn, loc, sub, 0)
2989 && ! validate_replace_rtx (x, sub, insn))
2990 abort ();
2992 insns = gen_sequence ();
2993 end_sequence ();
2994 emit_insn_before (insns, insn);
2995 return true;
2998 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3000 rtx sub = XEXP (XEXP (x, 0), 0);
3001 rtx sub2;
3003 if (GET_CODE (sub) == MEM)
3005 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3006 MEM_COPY_ATTRIBUTES (sub2, sub);
3007 sub = sub2;
3009 else if (GET_CODE (sub) == REG
3010 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3012 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3014 int size_x, size_sub;
3016 if (!insn)
3018 /* When processing REG_NOTES look at the list of
3019 replacements done on the insn to find the register that X
3020 was replaced by. */
3021 rtx tem;
3023 for (tem = purge_bitfield_addressof_replacements;
3024 tem != NULL_RTX;
3025 tem = XEXP (XEXP (tem, 1), 1))
3026 if (rtx_equal_p (x, XEXP (tem, 0)))
3028 *loc = XEXP (XEXP (tem, 1), 0);
3029 return true;
3032 /* See comment for purge_addressof_replacements. */
3033 for (tem = purge_addressof_replacements;
3034 tem != NULL_RTX;
3035 tem = XEXP (XEXP (tem, 1), 1))
3036 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3038 rtx z = XEXP (XEXP (tem, 1), 0);
3040 if (GET_MODE (x) == GET_MODE (z)
3041 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3042 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3043 abort ();
3045 /* It can happen that the note may speak of things
3046 in a wider (or just different) mode than the
3047 code did. This is especially true of
3048 REG_RETVAL. */
3050 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3051 z = SUBREG_REG (z);
3053 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3054 && (GET_MODE_SIZE (GET_MODE (x))
3055 > GET_MODE_SIZE (GET_MODE (z))))
3057 /* This can occur as a result of invalid
3058 pointer casts, e.g. float f; ...
3059 *(long long int *)&f.
3060 ??? We could emit a warning here, but
3061 without a line number that wouldn't be
3062 very helpful. */
3063 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3065 else
3066 z = gen_lowpart (GET_MODE (x), z);
3068 *loc = z;
3069 return true;
3072 /* Sometimes we may not be able to find the replacement. For
3073 example when the original insn was a MEM in a wider mode,
3074 and the note is part of a sign extension of a narrowed
3075 version of that MEM. The GCC testcase compile/990829-1.c can
3076 generate an example of this situation. Rather than complain,
3077 we return false, which will prompt our caller to remove the
3078 offending note. */
3079 return false;
3082 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3083 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3085 /* Don't even consider working with paradoxical subregs,
3086 or the moral equivalent seen here. */
3087 if (size_x <= size_sub
3088 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3090 /* Do a bitfield insertion to mirror what would happen
3091 in memory. */
3093 rtx val, seq;
3095 if (store)
3097 rtx p = PREV_INSN (insn);
3099 start_sequence ();
3100 val = gen_reg_rtx (GET_MODE (x));
3101 if (! validate_change (insn, loc, val, 0))
3103 /* Discard the current sequence and put the
3104 ADDRESSOF on stack. */
3105 end_sequence ();
3106 goto give_up;
3108 seq = gen_sequence ();
3109 end_sequence ();
3110 emit_insn_before (seq, insn);
3111 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3112 insn, ht);
3114 start_sequence ();
3115 store_bit_field (sub, size_x, 0, GET_MODE (x),
3116 val, GET_MODE_SIZE (GET_MODE (sub)),
3117 GET_MODE_ALIGNMENT (GET_MODE (sub)));
3119 /* Make sure to unshare any shared rtl that store_bit_field
3120 might have created. */
3121 unshare_all_rtl_again (get_insns ());
3123 seq = gen_sequence ();
3124 end_sequence ();
3125 p = emit_insn_after (seq, insn);
3126 if (NEXT_INSN (insn))
3127 compute_insns_for_mem (NEXT_INSN (insn),
3128 p ? NEXT_INSN (p) : NULL_RTX,
3129 ht);
3131 else
3133 rtx p = PREV_INSN (insn);
3135 start_sequence ();
3136 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3137 GET_MODE (x), GET_MODE (x),
3138 GET_MODE_SIZE (GET_MODE (sub)),
3139 GET_MODE_SIZE (GET_MODE (sub)));
3141 if (! validate_change (insn, loc, val, 0))
3143 /* Discard the current sequence and put the
3144 ADDRESSOF on stack. */
3145 end_sequence ();
3146 goto give_up;
3149 seq = gen_sequence ();
3150 end_sequence ();
3151 emit_insn_before (seq, insn);
3152 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3153 insn, ht);
3156 /* Remember the replacement so that the same one can be done
3157 on the REG_NOTES. */
3158 purge_bitfield_addressof_replacements
3159 = gen_rtx_EXPR_LIST (VOIDmode, x,
3160 gen_rtx_EXPR_LIST
3161 (VOIDmode, val,
3162 purge_bitfield_addressof_replacements));
3164 /* We replaced with a reg -- all done. */
3165 return true;
3169 else if (validate_change (insn, loc, sub, 0))
3171 /* Remember the replacement so that the same one can be done
3172 on the REG_NOTES. */
3173 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3175 rtx tem;
3177 for (tem = purge_addressof_replacements;
3178 tem != NULL_RTX;
3179 tem = XEXP (XEXP (tem, 1), 1))
3180 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3182 XEXP (XEXP (tem, 1), 0) = sub;
3183 return true;
3185 purge_addressof_replacements
3186 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3187 gen_rtx_EXPR_LIST (VOIDmode, sub,
3188 purge_addressof_replacements));
3189 return true;
3191 goto restart;
3193 give_up:;
3194 /* else give up and put it into the stack */
3197 else if (code == ADDRESSOF)
3199 put_addressof_into_stack (x, ht);
3200 return true;
3202 else if (code == SET)
3204 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3205 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3206 return result;
3209 /* Scan all subexpressions. */
3210 fmt = GET_RTX_FORMAT (code);
3211 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3213 if (*fmt == 'e')
3214 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3215 else if (*fmt == 'E')
3216 for (j = 0; j < XVECLEN (x, i); j++)
3217 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3220 return result;
3223 /* Return a new hash table entry in HT. */
3225 static struct hash_entry *
3226 insns_for_mem_newfunc (he, ht, k)
3227 struct hash_entry *he;
3228 struct hash_table *ht;
3229 hash_table_key k ATTRIBUTE_UNUSED;
3231 struct insns_for_mem_entry *ifmhe;
3232 if (he)
3233 return he;
3235 ifmhe = ((struct insns_for_mem_entry *)
3236 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3237 ifmhe->insns = NULL_RTX;
3239 return &ifmhe->he;
3242 /* Return a hash value for K, a REG. */
3244 static unsigned long
3245 insns_for_mem_hash (k)
3246 hash_table_key k;
3248 /* K is really an RTX. Just use the address as the hash value. */
3249 return (unsigned long) k;
3252 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3254 static boolean
3255 insns_for_mem_comp (k1, k2)
3256 hash_table_key k1;
3257 hash_table_key k2;
3259 return k1 == k2;
3262 struct insns_for_mem_walk_info {
3263 /* The hash table that we are using to record which INSNs use which
3264 MEMs. */
3265 struct hash_table *ht;
3267 /* The INSN we are currently processing. */
3268 rtx insn;
3270 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3271 to find the insns that use the REGs in the ADDRESSOFs. */
3272 int pass;
3275 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3276 that might be used in an ADDRESSOF expression, record this INSN in
3277 the hash table given by DATA (which is really a pointer to an
3278 insns_for_mem_walk_info structure). */
3280 static int
3281 insns_for_mem_walk (r, data)
3282 rtx *r;
3283 void *data;
3285 struct insns_for_mem_walk_info *ifmwi
3286 = (struct insns_for_mem_walk_info *) data;
3288 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3289 && GET_CODE (XEXP (*r, 0)) == REG)
3290 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3291 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3293 /* Look up this REG in the hash table; an entry will exist
only if pass 0 found it inside an ADDRESSOF. */
3294 struct insns_for_mem_entry *ifme
3295 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3296 *r,
3297 /*create=*/0,
3298 /*copy=*/0);
3300 /* If we have not already recorded this INSN, do so now. Since
3301 we process the INSNs in order, we know that if we have
3302 recorded it, it must be at the front of the list. */
3303 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3304 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3305 ifme->insns);
3308 return 0;
3311 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3312 which REGs in HT. */
3314 static void
3315 compute_insns_for_mem (insns, last_insn, ht)
3316 rtx insns;
3317 rtx last_insn;
3318 struct hash_table *ht;
3320 rtx insn;
3321 struct insns_for_mem_walk_info ifmwi;
3322 ifmwi.ht = ht;
3324 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3325 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3326 if (INSN_P (insn))
3328 ifmwi.insn = insn;
3329 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3333 /* Helper function for purge_addressof called through for_each_rtx.
3334 Returns true iff the rtl is an ADDRESSOF. */
3335 static int
3336 is_addressof (rtl, data)
3337 rtx *rtl;
3338 void *data ATTRIBUTE_UNUSED;
3340 return GET_CODE (*rtl) == ADDRESSOF;
3343 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3344 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3345 stack. */
3347 void
3348 purge_addressof (insns)
3349 rtx insns;
3351 rtx insn;
3352 struct hash_table ht;
3354 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3355 requires a fixup pass over the instruction stream to correct
3356 INSNs that depended on the REG being a REG, and not a MEM. But,
3357 these fixup passes are slow. Furthermore, most MEMs are not
3358 mentioned in very many instructions. So, we speed up the process
3359 by pre-calculating which REGs occur in which INSNs; that allows
3360 us to perform the fixup passes much more quickly. */
3361 hash_table_init (&ht,
3362 insns_for_mem_newfunc,
3363 insns_for_mem_hash,
3364 insns_for_mem_comp);
3365 compute_insns_for_mem (insns, NULL_RTX, &ht);
3367 for (insn = insns; insn; insn = NEXT_INSN (insn))
3368 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3369 || GET_CODE (insn) == CALL_INSN)
3371 if (! purge_addressof_1 (&PATTERN (insn), insn,
3372 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3373 /* If we could not replace the ADDRESSOFs in the insn,
3374 something is wrong. */
3375 abort ();
3377 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3379 /* If we could not replace the ADDRESSOFs in the insn's notes,
3380 we can just remove the offending notes instead. */
3381 rtx note;
3383 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3385 /* If we find a REG_RETVAL note then the insn is a libcall.
3386 Such insns must have REG_EQUAL notes as well, in order
3387 for later passes of the compiler to work. So it is not
3388 safe to delete the notes here, and instead we abort. */
3389 if (REG_NOTE_KIND (note) == REG_RETVAL)
3390 abort ();
3391 if (for_each_rtx (&note, is_addressof, NULL))
3392 remove_note (insn, note);
3397 /* Clean up. */
3398 hash_table_free (&ht);
3399 purge_bitfield_addressof_replacements = 0;
3400 purge_addressof_replacements = 0;
3402 /* REGs are shared. purge_addressof will destructively replace a REG
3403 with a MEM, which creates shared MEMs.
3405 Unfortunately, the children of put_reg_into_stack assume that MEMs
3406 referring to the same stack slot are shared (fixup_var_refs and
3407 the associated hash table code).
3409 So, we have to do another unsharing pass after we have flushed any
3410 REGs that had their address taken into the stack.
3412 It may be worth tracking whether or not we converted any REGs into
3413 MEMs to avoid this overhead when it is not needed. */
3414 unshare_all_rtl_again (get_insns ());
3417 /* Convert a SET of a hard subreg to a set of the appropriate hard
3418 register. A subroutine of purge_hard_subreg_sets. */
3420 static void
3421 purge_single_hard_subreg_set (pattern)
3422 rtx pattern;
3424 rtx reg = SET_DEST (pattern);
3425 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3426 int word = 0;
3428 while (GET_CODE (reg) == SUBREG)
3430 word += SUBREG_WORD (reg);
3431 reg = SUBREG_REG (reg);
3434 if (REGNO (reg) < FIRST_PSEUDO_REGISTER)
3436 reg = gen_rtx_REG (mode, REGNO (reg) + word);
3437 SET_DEST (pattern) = reg;
3441 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3442 only such SETs that we expect to see are those left in because
3443 integrate can't handle sets of parts of a return value register.
3445 We don't use alter_subreg because we only want to eliminate subregs
3446 of hard registers. */
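/* A sketch, assuming 4-byte words and that hard registers 0 and 1
   form the DImode pair:

     (set (subreg:SI (reg:DI 0) 1) (reg:SI 100))
   becomes
     (set (reg:SI 1) (reg:SI 100))

   since word 1 of the pair is hard register 1.  */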
3448 void
3449 purge_hard_subreg_sets (insn)
3450 rtx insn;
3452 for (; insn; insn = NEXT_INSN (insn))
3454 if (INSN_P (insn))
3456 rtx pattern = PATTERN (insn);
3457 switch (GET_CODE (pattern))
3459 case SET:
3460 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3461 purge_single_hard_subreg_set (pattern);
3462 break;
3463 case PARALLEL:
3465 int j;
3466 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3468 rtx inner_pattern = XVECEXP (pattern, 0, j);
3469 if (GET_CODE (inner_pattern) == SET
3470 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3471 purge_single_hard_subreg_set (inner_pattern);
3474 break;
3475 default:
3476 break;
3482 /* Pass through the INSNS of function FNDECL and convert virtual register
3483 references to hard register references. */
3485 void
3486 instantiate_virtual_regs (fndecl, insns)
3487 tree fndecl;
3488 rtx insns;
3490 rtx insn;
3491 unsigned int i;
3493 /* Compute the offsets to use for this function. */
3494 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3495 var_offset = STARTING_FRAME_OFFSET;
3496 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3497 out_arg_offset = STACK_POINTER_OFFSET;
3498 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3500 /* Scan all variables and parameters of this function. For each that is
3501 in memory, instantiate all virtual registers if the result is a valid
3502 address. If not, we do it later. That will handle most uses of virtual
3503 regs on many machines. */
3504 instantiate_decls (fndecl, 1);
3506 /* Initialize recognition, indicating that volatile is OK. */
3507 init_recog ();
3509 /* Scan through all the insns, instantiating every virtual register still
3510 present. */
3511 for (insn = insns; insn; insn = NEXT_INSN (insn))
3512 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3513 || GET_CODE (insn) == CALL_INSN)
3515 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3516 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3519 /* Instantiate the stack slots for the parm registers, for later use in
3520 addressof elimination. */
3521 for (i = 0; i < max_parm_reg; ++i)
3522 if (parm_reg_stack_loc[i])
3523 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3525 /* Now instantiate the remaining register equivalences for debugging info.
3526 These will not be valid addresses. */
3527 instantiate_decls (fndecl, 0);
3529 /* Indicate that, from now on, assign_stack_local should use
3530 frame_pointer_rtx. */
3531 virtuals_instantiated = 1;
3534 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3535 all virtual registers in their DECL_RTL's.
3537 If VALID_ONLY, do this only if the resulting address is still valid.
3538 Otherwise, always do it. */
3540 static void
3541 instantiate_decls (fndecl, valid_only)
3542 tree fndecl;
3543 int valid_only;
3545 tree decl;
3547 /* Process all parameters of the function. */
3548 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3550 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3552 instantiate_decl (DECL_RTL (decl), size, valid_only);
3554 /* If the parameter was promoted, then the incoming RTL mode may be
3555 larger than the declared type size. We must use the larger of
3556 the two sizes. */
3557 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3558 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3561 /* Now process all variables defined in the function or its subblocks. */
3562 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3565 /* Subroutine of instantiate_decls: Process all decls in the given
3566 BLOCK node and all its subblocks. */
3568 static void
3569 instantiate_decls_1 (let, valid_only)
3570 tree let;
3571 int valid_only;
3573 tree t;
3575 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3576 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3577 valid_only);
3579 /* Process all subblocks. */
3580 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3581 instantiate_decls_1 (t, valid_only);
3584 /* Subroutine of the preceding procedures: Given RTL representing a
3585 decl and the size of the object, do any instantiation required.
3587 If VALID_ONLY is non-zero, it means that the RTL should only be
3588 changed if the new address is valid. */
3590 static void
3591 instantiate_decl (x, size, valid_only)
3592 rtx x;
3593 HOST_WIDE_INT size;
3594 int valid_only;
3596 enum machine_mode mode;
3597 rtx addr;
3599 /* If this is not a MEM, no need to do anything. Similarly if the
3600 address is a constant or a register that is not a virtual register. */
3602 if (x == 0 || GET_CODE (x) != MEM)
3603 return;
3605 addr = XEXP (x, 0);
3606 if (CONSTANT_P (addr)
3607 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3608 || (GET_CODE (addr) == REG
3609 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3610 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3611 return;
3613 /* If we should only do this if the address is valid, copy the address.
3614 We need to do this so we can undo any changes that might make the
3615 address invalid. This copy is unfortunate, but probably can't be
3616 avoided. */
3618 if (valid_only)
3619 addr = copy_rtx (addr);
3621 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3623 if (valid_only && size >= 0)
3625 unsigned HOST_WIDE_INT decl_size = size;
3627 /* Now verify that the resulting address is valid for every integer or
3628 floating-point mode up to and including SIZE bytes long. We do this
3629 since the object might be accessed in any mode and frame addresses
3630 are shared. */
3632 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3633 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3634 mode = GET_MODE_WIDER_MODE (mode))
3635 if (! memory_address_p (mode, addr))
3636 return;
3638 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3639 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3640 mode = GET_MODE_WIDER_MODE (mode))
3641 if (! memory_address_p (mode, addr))
3642 return;
3645 /* Put back the address now that we have updated it and we either know
3646 it is valid or we don't care whether it is valid. */
3648 XEXP (x, 0) = addr;
3651 /* Given a pointer to a piece of rtx and an optional pointer to the
3652 containing object, instantiate any virtual registers present in it.
3654 If EXTRA_INSNS, we always do the replacement and generate
3655 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3656 is not valid.
3658 Return 1 if we either had nothing to do or if we were able to do the
3659 needed replacement. Return 0 otherwise; we only return zero if
3660 EXTRA_INSNS is zero.
3662 We first try some simple transformations to avoid the creation of extra
3663 pseudos. */
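/* The common case is a virtual register plus a constant.  For
   example, if var_offset were 16 (a hypothetical value):

     (plus:SI (reg:SI virtual-stack-vars) (const_int 8))
   becomes
     (plus:SI (reg:SI fp) (const_int 24))

   with no new insns, since the two constants fold together.  */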
3665 static int
3666 instantiate_virtual_regs_1 (loc, object, extra_insns)
3667 rtx *loc;
3668 rtx object;
3669 int extra_insns;
3671 rtx x;
3672 RTX_CODE code;
3673 rtx new = 0;
3674 HOST_WIDE_INT offset = 0;
3675 rtx temp;
3676 rtx seq;
3677 int i, j;
3678 const char *fmt;
3680 /* Re-start here to avoid recursion in common cases. */
3681 restart:
3683 x = *loc;
3684 if (x == 0)
3685 return 1;
3687 code = GET_CODE (x);
3689 /* Check for some special cases. */
3690 switch (code)
3692 case CONST_INT:
3693 case CONST_DOUBLE:
3694 case CONST:
3695 case SYMBOL_REF:
3696 case CODE_LABEL:
3697 case PC:
3698 case CC0:
3699 case ASM_INPUT:
3700 case ADDR_VEC:
3701 case ADDR_DIFF_VEC:
3702 case RETURN:
3703 return 1;
3705 case SET:
3706 /* We are allowed to set the virtual registers. This means that
3707 the actual register should receive the source minus the
3708 appropriate offset. This is used, for example, in the handling
3709 of non-local gotos. */
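/* For example (offsets hypothetical), a nonlocal goto receiver may
   contain

	(set (reg:SI virtual-stack-vars) (reg:SI 100))

   With var_offset == -16, OFFSET here is 16, so SET_DEST becomes the
   frame pointer and SET_SRC becomes (reg:SI 100) plus 16, with any
   insns needed to form that sum emitted just before OBJECT.  */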
3710 if (SET_DEST (x) == virtual_incoming_args_rtx)
3711 new = arg_pointer_rtx, offset = -in_arg_offset;
3712 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3713 new = frame_pointer_rtx, offset = -var_offset;
3714 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3715 new = stack_pointer_rtx, offset = -dynamic_offset;
3716 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3717 new = stack_pointer_rtx, offset = -out_arg_offset;
3718 else if (SET_DEST (x) == virtual_cfa_rtx)
3719 new = arg_pointer_rtx, offset = -cfa_offset;
3721 if (new)
3723 rtx src = SET_SRC (x);
3725 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3727 /* The only valid sources here are PLUS or REG. Just do
3728 the simplest possible thing to handle them. */
3729 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3730 abort ();
3732 start_sequence ();
3733 if (GET_CODE (src) != REG)
3734 temp = force_operand (src, NULL_RTX);
3735 else
3736 temp = src;
3737 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3738 seq = get_insns ();
3739 end_sequence ();
3741 emit_insns_before (seq, object);
3742 SET_DEST (x) = new;
3744 if (! validate_change (object, &SET_SRC (x), temp, 0)
3745 || ! extra_insns)
3746 abort ();
3748 return 1;
3751 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3752 loc = &SET_SRC (x);
3753 goto restart;
3755 case PLUS:
3756 /* Handle special case of virtual register plus constant. */
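    /* For instance (offset hypothetical), with in_arg_offset == 8,

	  (plus:SI (reg:SI virtual-incoming-args) (const_int 4))

       becomes (plus:SI (reg:SI arg-pointer) (const_int 12)); and had
       the new constant come out zero, the sum would collapse to the
       bare register.  */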
3757 if (CONSTANT_P (XEXP (x, 1)))
3759 rtx old, new_offset;
3761 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3762 if (GET_CODE (XEXP (x, 0)) == PLUS)
3764 rtx inner = XEXP (XEXP (x, 0), 0);
3766 if (inner == virtual_incoming_args_rtx)
3767 new = arg_pointer_rtx, offset = in_arg_offset;
3768 else if (inner == virtual_stack_vars_rtx)
3769 new = frame_pointer_rtx, offset = var_offset;
3770 else if (inner == virtual_stack_dynamic_rtx)
3771 new = stack_pointer_rtx, offset = dynamic_offset;
3772 else if (inner == virtual_outgoing_args_rtx)
3773 new = stack_pointer_rtx, offset = out_arg_offset;
3774 else if (inner == virtual_cfa_rtx)
3775 new = arg_pointer_rtx, offset = cfa_offset;
3776 else
3778 loc = &XEXP (x, 0);
3779 goto restart;
3782 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3783 extra_insns);
3784 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3787 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3788 new = arg_pointer_rtx, offset = in_arg_offset;
3789 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3790 new = frame_pointer_rtx, offset = var_offset;
3791 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3792 new = stack_pointer_rtx, offset = dynamic_offset;
3793 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3794 new = stack_pointer_rtx, offset = out_arg_offset;
3795 else if (XEXP (x, 0) == virtual_cfa_rtx)
3796 new = arg_pointer_rtx, offset = cfa_offset;
3797 else
3799 /* We know the second operand is a constant. Unless the
3800       first operand is a REG (which has already been checked),
3801 it needs to be checked. */
3802 if (GET_CODE (XEXP (x, 0)) != REG)
3804 loc = &XEXP (x, 0);
3805 goto restart;
3807 return 1;
3810 new_offset = plus_constant (XEXP (x, 1), offset);
3812 /* If the new constant is zero, try to replace the sum with just
3813 the register. */
3814 if (new_offset == const0_rtx
3815 && validate_change (object, loc, new, 0))
3816 return 1;
3818 /* Next try to replace the register and new offset.
3819 There are two changes to validate here and we can't assume that
3820     in the case where the old offset equals the new, just changing the register
3821 will yield a valid insn. In the interests of a little efficiency,
3822 however, we only call validate change once (we don't queue up the
3823 changes and then call apply_change_group). */
3825 old = XEXP (x, 0);
3826 if (offset == 0
3827 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3828 : (XEXP (x, 0) = new,
3829 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3831 if (! extra_insns)
3833 XEXP (x, 0) = old;
3834 return 0;
3837 /* Otherwise copy the new constant into a register and replace
3838     the constant with that register.  */
3839 temp = gen_reg_rtx (Pmode);
3840 XEXP (x, 0) = new;
3841 if (validate_change (object, &XEXP (x, 1), temp, 0))
3842 emit_insn_before (gen_move_insn (temp, new_offset), object);
3843 else
3845 /* If that didn't work, replace this expression with a
3846 register containing the sum. */
3848 XEXP (x, 0) = old;
3849 new = gen_rtx_PLUS (Pmode, new, new_offset);
3851 start_sequence ();
3852 temp = force_operand (new, NULL_RTX);
3853 seq = get_insns ();
3854 end_sequence ();
3856 emit_insns_before (seq, object);
3857 if (! validate_change (object, loc, temp, 0)
3858 && ! validate_replace_rtx (x, temp, object))
3859 abort ();
3863 return 1;
3866 /* Fall through to generic two-operand expression case. */
3867 case EXPR_LIST:
3868 case CALL:
3869 case COMPARE:
3870 case MINUS:
3871 case MULT:
3872 case DIV: case UDIV:
3873 case MOD: case UMOD:
3874 case AND: case IOR: case XOR:
3875 case ROTATERT: case ROTATE:
3876 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3877 case NE: case EQ:
3878 case GE: case GT: case GEU: case GTU:
3879 case LE: case LT: case LEU: case LTU:
3880 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3881 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3882 loc = &XEXP (x, 0);
3883 goto restart;
3885 case MEM:
3886 /* Most cases of MEM that convert to valid addresses have already been
3887 handled by our scan of decls. The only special handling we
3888 need here is to make a copy of the rtx to ensure it isn't being
3889 shared if we have to change it to a pseudo.
3891 If the rtx is a simple reference to an address via a virtual register,
3892 it can potentially be shared. In such cases, first try to make it
3893 a valid address, which can also be shared. Otherwise, copy it and
3894 proceed normally.
3896 First check for common cases that need no processing. These are
3897 usually due to instantiation already being done on a previous instance
3898 of a shared rtx. */
3900 temp = XEXP (x, 0);
3901 if (CONSTANT_ADDRESS_P (temp)
3902 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3903 || temp == arg_pointer_rtx
3904 #endif
3905 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3906 || temp == hard_frame_pointer_rtx
3907 #endif
3908 || temp == frame_pointer_rtx)
3909 return 1;
3911 if (GET_CODE (temp) == PLUS
3912 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3913 && (XEXP (temp, 0) == frame_pointer_rtx
3914 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3915 || XEXP (temp, 0) == hard_frame_pointer_rtx
3916 #endif
3917 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3918 || XEXP (temp, 0) == arg_pointer_rtx
3919 #endif
3921 return 1;
3923 if (temp == virtual_stack_vars_rtx
3924 || temp == virtual_incoming_args_rtx
3925 || (GET_CODE (temp) == PLUS
3926 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3927 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3928 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3930 /* This MEM may be shared. If the substitution can be done without
3931 the need to generate new pseudos, we want to do it in place
3932 so all copies of the shared rtx benefit. The call below will
3933 only make substitutions if the resulting address is still
3934 valid.
3936 Note that we cannot pass X as the object in the recursive call
3937 since the insn being processed may not allow all valid
3938     addresses.  However, if we were not passed an object, we can
3939 only modify X without copying it if X will have a valid
3940 address.
3942 ??? Also note that this can still lose if OBJECT is an insn that
3943     has fewer restrictions on an address than some other insn.
3944 In that case, we will modify the shared address. This case
3945 doesn't seem very likely, though. One case where this could
3946 happen is in the case of a USE or CLOBBER reference, but we
3947 take care of that below. */
3949 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3950 object ? object : x, 0))
3951 return 1;
3953 /* Otherwise make a copy and process that copy. We copy the entire
3954 RTL expression since it might be a PLUS which could also be
3955 shared. */
3956 *loc = x = copy_rtx (x);
3959 /* Fall through to generic unary operation case. */
3960 case SUBREG:
3961 case STRICT_LOW_PART:
3962 case NEG: case NOT:
3963 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3964 case SIGN_EXTEND: case ZERO_EXTEND:
3965 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3966 case FLOAT: case FIX:
3967 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3968 case ABS:
3969 case SQRT:
3970 case FFS:
3971      /* These cases either have just one operand or we know that we need not
3972 check the rest of the operands. */
3973 loc = &XEXP (x, 0);
3974 goto restart;
3976 case USE:
3977 case CLOBBER:
3978      /* If the operand is a MEM, see if the change yields a valid MEM.  If not,
3979	 go ahead and make the invalid change anyway, but do it to a copy.  For a REG,
3980 just make the recursive call, since there's no chance of a problem. */
3982 if ((GET_CODE (XEXP (x, 0)) == MEM
3983 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3985 || (GET_CODE (XEXP (x, 0)) == REG
3986 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3987 return 1;
3989 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3990 loc = &XEXP (x, 0);
3991 goto restart;
3993 case REG:
3994 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3995 in front of this insn and substitute the temporary. */
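    /* E.g. a bare (reg:SI virtual-outgoing-args) operand becomes
       (plus:SI (reg:SI stack-pointer) (const_int out_arg_offset)); if
       the insn rejects a PLUS in that position, the sum is computed
       into a fresh pseudo ahead of the insn and the pseudo
       substituted instead.  */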
3996 if (x == virtual_incoming_args_rtx)
3997 new = arg_pointer_rtx, offset = in_arg_offset;
3998 else if (x == virtual_stack_vars_rtx)
3999 new = frame_pointer_rtx, offset = var_offset;
4000 else if (x == virtual_stack_dynamic_rtx)
4001 new = stack_pointer_rtx, offset = dynamic_offset;
4002 else if (x == virtual_outgoing_args_rtx)
4003 new = stack_pointer_rtx, offset = out_arg_offset;
4004 else if (x == virtual_cfa_rtx)
4005 new = arg_pointer_rtx, offset = cfa_offset;
4007 if (new)
4009 temp = plus_constant (new, offset);
4010 if (!validate_change (object, loc, temp, 0))
4012 if (! extra_insns)
4013 return 0;
4015 start_sequence ();
4016 temp = force_operand (temp, NULL_RTX);
4017 seq = get_insns ();
4018 end_sequence ();
4020 emit_insns_before (seq, object);
4021 if (! validate_change (object, loc, temp, 0)
4022 && ! validate_replace_rtx (x, temp, object))
4023 abort ();
4027 return 1;
4029 case ADDRESSOF:
4030 if (GET_CODE (XEXP (x, 0)) == REG)
4031 return 1;
4033 else if (GET_CODE (XEXP (x, 0)) == MEM)
4035	  /* If we have an (addressof (mem ..)), do any instantiation inside
4036 since we know we'll be making the inside valid when we finally
4037 remove the ADDRESSOF. */
4038 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4039 return 1;
4041 break;
4043 default:
4044 break;
4047 /* Scan all subexpressions. */
4048 fmt = GET_RTX_FORMAT (code);
4049 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4050 if (*fmt == 'e')
4052 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4053 return 0;
4055 else if (*fmt == 'E')
4056 for (j = 0; j < XVECLEN (x, i); j++)
4057 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4058 extra_insns))
4059 return 0;
4061 return 1;
4064 /* Optimization: assuming this function does not receive nonlocal gotos,
4065 delete the handlers for such, as well as the insns to establish
4066 and disestablish them. */
4068 static void
4069 delete_handlers ()
4071 rtx insn;
4072 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4074 /* Delete the handler by turning off the flag that would
4075 prevent jump_optimize from deleting it.
4076 Also permit deletion of the nonlocal labels themselves
4077 if nothing local refers to them. */
4078 if (GET_CODE (insn) == CODE_LABEL)
4080 tree t, last_t;
4082 LABEL_PRESERVE_P (insn) = 0;
4084 /* Remove it from the nonlocal_label list, to avoid confusing
4085 flow. */
4086 for (t = nonlocal_labels, last_t = 0; t;
4087 last_t = t, t = TREE_CHAIN (t))
4088 if (DECL_RTL (TREE_VALUE (t)) == insn)
4089 break;
4090 if (t)
4092 if (! last_t)
4093 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4094 else
4095 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4098 if (GET_CODE (insn) == INSN)
4100 int can_delete = 0;
4101 rtx t;
4102 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4103 if (reg_mentioned_p (t, PATTERN (insn)))
4105 can_delete = 1;
4106 break;
4108 if (can_delete
4109 || (nonlocal_goto_stack_level != 0
4110 && reg_mentioned_p (nonlocal_goto_stack_level,
4111 PATTERN (insn))))
4112 delete_insn (insn);
4118 max_parm_reg_num ()
4120 return max_parm_reg;
4123 /* Return the first insn following those generated by `assign_parms'. */
4126 get_first_nonparm_insn ()
4128 if (last_parm_insn)
4129 return NEXT_INSN (last_parm_insn);
4130 return get_insns ();
4133 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4134 Crash if there is none. */
4137 get_first_block_beg ()
4139 register rtx searcher;
4140 register rtx insn = get_first_nonparm_insn ();
4142 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4143 if (GET_CODE (searcher) == NOTE
4144 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4145 return searcher;
4147 abort (); /* Invalid call to this function. (See comments above.) */
4148 return NULL_RTX;
4151 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4152 This means a type for which function calls must pass an address to the
4153 function or get an address back from the function.
4154 EXP may be a type node or an expression (whose type is tested). */
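/* A sketch of the common case: on a target whose RETURN_IN_MEMORY
   rejects large records,

	struct big { char buf[64]; };
	struct big f (void);

   gives aggregate_value_p == 1 for F's result, so callers must pass
   the address of a temporary in which F builds its value.  The exact
   cutoff is target-defined; the structure size here is arbitrary.  */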
4157 aggregate_value_p (exp)
4158 tree exp;
4160 int i, regno, nregs;
4161 rtx reg;
4163 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4165 if (TREE_CODE (type) == VOID_TYPE)
4166 return 0;
4167 if (RETURN_IN_MEMORY (type))
4168 return 1;
4169 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4170 and thus can't be returned in registers. */
4171 if (TREE_ADDRESSABLE (type))
4172 return 1;
4173 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4174 return 1;
4175 /* Make sure we have suitable call-clobbered regs to return
4176 the value in; if not, we must return it in memory. */
4177 reg = hard_function_value (type, 0, 0);
4179 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4180 it is OK. */
4181 if (GET_CODE (reg) != REG)
4182 return 0;
4184 regno = REGNO (reg);
4185 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4186 for (i = 0; i < nregs; i++)
4187 if (! call_used_regs[regno + i])
4188 return 1;
4189 return 0;
4192 /* Assign RTL expressions to the function's parameters.
4193 This may involve copying them into registers and using
4194 those registers as the RTL for them. */
4196 void
4197 assign_parms (fndecl)
4198 tree fndecl;
4200 register tree parm;
4201 register rtx entry_parm = 0;
4202 register rtx stack_parm = 0;
4203 CUMULATIVE_ARGS args_so_far;
4204 enum machine_mode promoted_mode, passed_mode;
4205 enum machine_mode nominal_mode, promoted_nominal_mode;
4206 int unsignedp;
4207 /* Total space needed so far for args on the stack,
4208 given as a constant and a tree-expression. */
4209 struct args_size stack_args_size;
4210 tree fntype = TREE_TYPE (fndecl);
4211 tree fnargs = DECL_ARGUMENTS (fndecl);
4212 /* This is used for the arg pointer when referring to stack args. */
4213 rtx internal_arg_pointer;
4214   /* This is a dummy PARM_DECL that we use for the function result if
4215 the function returns a structure. */
4216 tree function_result_decl = 0;
4217 #ifdef SETUP_INCOMING_VARARGS
4218 int varargs_setup = 0;
4219 #endif
4220 rtx conversion_insns = 0;
4221 struct args_size alignment_pad;
4223 /* Nonzero if the last arg is named `__builtin_va_alist',
4224 which is used on some machines for old-fashioned non-ANSI varargs.h;
4225 this should be stuck onto the stack as if it had arrived there. */
4226 int hide_last_arg
4227 = (current_function_varargs
4228 && fnargs
4229 && (parm = tree_last (fnargs)) != 0
4230 && DECL_NAME (parm)
4231 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4232 "__builtin_va_alist")));
4234 /* Nonzero if function takes extra anonymous args.
4235 This means the last named arg must be on the stack
4236 right before the anonymous ones. */
4237 int stdarg
4238 = (TYPE_ARG_TYPES (fntype) != 0
4239 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4240 != void_type_node));
4242 current_function_stdarg = stdarg;
4244 /* If the reg that the virtual arg pointer will be translated into is
4245 not a fixed reg or is the stack pointer, make a copy of the virtual
4246 arg pointer, and address parms via the copy. The frame pointer is
4247 considered fixed even though it is not marked as such.
4249 The second time through, simply use ap to avoid generating rtx. */
4251 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4252 || ! (fixed_regs[ARG_POINTER_REGNUM]
4253 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4254 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4255 else
4256 internal_arg_pointer = virtual_incoming_args_rtx;
4257 current_function_internal_arg_pointer = internal_arg_pointer;
4259 stack_args_size.constant = 0;
4260 stack_args_size.var = 0;
4262 /* If struct value address is treated as the first argument, make it so. */
4263 if (aggregate_value_p (DECL_RESULT (fndecl))
4264 && ! current_function_returns_pcc_struct
4265 && struct_value_incoming_rtx == 0)
4267 tree type = build_pointer_type (TREE_TYPE (fntype));
4269 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4271 DECL_ARG_TYPE (function_result_decl) = type;
4272 TREE_CHAIN (function_result_decl) = fnargs;
4273 fnargs = function_result_decl;
4276 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4277 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4279 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4280 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4281 #else
4282 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4283 #endif
4285 /* We haven't yet found an argument that we must push and pretend the
4286 caller did. */
4287 current_function_pretend_args_size = 0;
4289 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4291 struct args_size stack_offset;
4292 struct args_size arg_size;
4293 int passed_pointer = 0;
4294 int did_conversion = 0;
4295 tree passed_type = DECL_ARG_TYPE (parm);
4296 tree nominal_type = TREE_TYPE (parm);
4297 int pretend_named;
4299 /* Set LAST_NAMED if this is last named arg before some
4300 anonymous args. */
4301 int last_named = ((TREE_CHAIN (parm) == 0
4302 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4303 && (stdarg || current_function_varargs));
4304 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4305 most machines, if this is a varargs/stdarg function, then we treat
4306 the last named arg as if it were anonymous too. */
4307 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4309 if (TREE_TYPE (parm) == error_mark_node
4310 /* This can happen after weird syntax errors
4311 or if an enum type is defined among the parms. */
4312 || TREE_CODE (parm) != PARM_DECL
4313 || passed_type == NULL)
4315 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4316 = gen_rtx_MEM (BLKmode, const0_rtx);
4317 TREE_USED (parm) = 1;
4318 continue;
4321       /* For a varargs.h function, save info about regs and stack space
4322 used by the individual args, not including the va_alist arg. */
4323 if (hide_last_arg && last_named)
4324 current_function_args_info = args_so_far;
4326 /* Find mode of arg as it is passed, and mode of arg
4327 as it should be during execution of this function. */
4328 passed_mode = TYPE_MODE (passed_type);
4329 nominal_mode = TYPE_MODE (nominal_type);
4331 /* If the parm's mode is VOID, its value doesn't matter,
4332	 so avoid the usual things like emit_move_insn that could crash.  */
4333 if (nominal_mode == VOIDmode)
4335 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4336 continue;
4339 /* If the parm is to be passed as a transparent union, use the
4340 type of the first field for the tests below. We have already
4341 verified that the modes are the same. */
4342 if (DECL_TRANSPARENT_UNION (parm)
4343 || (TREE_CODE (passed_type) == UNION_TYPE
4344 && TYPE_TRANSPARENT_UNION (passed_type)))
4345 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4347 /* See if this arg was passed by invisible reference. It is if
4348 it is an object whose size depends on the contents of the
4349 object itself or if the machine requires these objects be passed
4350 that way. */
4352 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4353 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4354 || TREE_ADDRESSABLE (passed_type)
4355 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4356 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4357 passed_type, named_arg)
4358 #endif
4361 passed_type = nominal_type = build_pointer_type (passed_type);
4362 passed_pointer = 1;
4363 passed_mode = nominal_mode = Pmode;
4366 promoted_mode = passed_mode;
4368 #ifdef PROMOTE_FUNCTION_ARGS
4369 /* Compute the mode in which the arg is actually extended to. */
4370 unsignedp = TREE_UNSIGNED (passed_type);
4371 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4372 #endif
4374 /* Let machine desc say which reg (if any) the parm arrives in.
4375 0 means it arrives on the stack. */
4376 #ifdef FUNCTION_INCOMING_ARG
4377 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4378 passed_type, named_arg);
4379 #else
4380 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4381 passed_type, named_arg);
4382 #endif
4384 if (entry_parm == 0)
4385 promoted_mode = passed_mode;
4387 #ifdef SETUP_INCOMING_VARARGS
4388 /* If this is the last named parameter, do any required setup for
4389 varargs or stdargs. We need to know about the case of this being an
4390 addressable type, in which case we skip the registers it
4391 would have arrived in.
4393 For stdargs, LAST_NAMED will be set for two parameters, the one that
4394 is actually the last named, and the dummy parameter. We only
4395 want to do this action once.
4397 Also, indicate when RTL generation is to be suppressed. */
4398 if (last_named && !varargs_setup)
4400 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4401 current_function_pretend_args_size, 0);
4402 varargs_setup = 1;
4404 #endif
4406 /* Determine parm's home in the stack,
4407 in case it arrives in the stack or we should pretend it did.
4409 Compute the stack position and rtx where the argument arrives
4410 and its size.
4412 There is one complexity here: If this was a parameter that would
4413 have been passed in registers, but wasn't only because it is
4414 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4415 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4416 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4417 0 as it was the previous time. */
4419 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4420 locate_and_pad_parm (promoted_mode, passed_type,
4421 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4423 #else
4424 #ifdef FUNCTION_INCOMING_ARG
4425 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4426 passed_type,
4427 pretend_named) != 0,
4428 #else
4429 FUNCTION_ARG (args_so_far, promoted_mode,
4430 passed_type,
4431 pretend_named) != 0,
4432 #endif
4433 #endif
4434 fndecl, &stack_args_size, &stack_offset, &arg_size,
4435 &alignment_pad);
4438 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4440 if (offset_rtx == const0_rtx)
4441 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4442 else
4443 stack_parm = gen_rtx_MEM (promoted_mode,
4444 gen_rtx_PLUS (Pmode,
4445 internal_arg_pointer,
4446 offset_rtx));
4448 set_mem_attributes (stack_parm, parm, 1);
4451 /* If this parameter was passed both in registers and in the stack,
4452 use the copy on the stack. */
4453 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4454 entry_parm = 0;
4456 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4457 /* If this parm was passed part in regs and part in memory,
4458 pretend it arrived entirely in memory
4459 by pushing the register-part onto the stack.
4461 In the special case of a DImode or DFmode that is split,
4462 we could put it together in a pseudoreg directly,
4463 but for now that's not worth bothering with. */
4465 if (entry_parm)
4467 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4468 passed_type, named_arg);
4470 if (nregs > 0)
4472 current_function_pretend_args_size
4473 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4474 / (PARM_BOUNDARY / BITS_PER_UNIT)
4475 * (PARM_BOUNDARY / BITS_PER_UNIT));
4477 /* Handle calls that pass values in multiple non-contiguous
4478 locations. The Irix 6 ABI has examples of this. */
4479 if (GET_CODE (entry_parm) == PARALLEL)
4480 emit_group_store (validize_mem (stack_parm), entry_parm,
4481 int_size_in_bytes (TREE_TYPE (parm)),
4482 TYPE_ALIGN (TREE_TYPE (parm)));
4484 else
4485 move_block_from_reg (REGNO (entry_parm),
4486 validize_mem (stack_parm), nregs,
4487 int_size_in_bytes (TREE_TYPE (parm)));
4489 entry_parm = stack_parm;
4492 #endif
4494 /* If we didn't decide this parm came in a register,
4495 by default it came on the stack. */
4496 if (entry_parm == 0)
4497 entry_parm = stack_parm;
4499 /* Record permanently how this parm was passed. */
4500 DECL_INCOMING_RTL (parm) = entry_parm;
4502 /* If there is actually space on the stack for this parm,
4503 count it in stack_args_size; otherwise set stack_parm to 0
4504 to indicate there is no preallocated stack slot for the parm. */
4506 if (entry_parm == stack_parm
4507 || (GET_CODE (entry_parm) == PARALLEL
4508 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4509 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4510 /* On some machines, even if a parm value arrives in a register
4511 there is still an (uninitialized) stack slot allocated for it.
4513 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4514 whether this parameter already has a stack slot allocated,
4515 because an arg block exists only if current_function_args_size
4516 is larger than some threshold, and we haven't calculated that
4517 yet. So, for now, we just assume that stack slots never exist
4518 in this case. */
4519 || REG_PARM_STACK_SPACE (fndecl) > 0
4520 #endif
4523 stack_args_size.constant += arg_size.constant;
4524 if (arg_size.var)
4525 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4527 else
4528 /* No stack slot was pushed for this parm. */
4529 stack_parm = 0;
4531 /* Update info on where next arg arrives in registers. */
4533 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4534 passed_type, named_arg);
4536 /* If we can't trust the parm stack slot to be aligned enough
4537 for its ultimate type, don't use that slot after entry.
4538 We'll make another stack slot, if we need one. */
4540 unsigned int thisparm_boundary
4541 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4543 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4544 stack_parm = 0;
4547 /* If parm was passed in memory, and we need to convert it on entry,
4548 don't store it back in that same slot. */
4549 if (entry_parm != 0
4550 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4551 stack_parm = 0;
4553 /* When an argument is passed in multiple locations, we can't
4554 make use of this information, but we can save some copying if
4555 the whole argument is passed in a single register. */
4556 if (GET_CODE (entry_parm) == PARALLEL
4557 && nominal_mode != BLKmode && passed_mode != BLKmode)
4559 int i, len = XVECLEN (entry_parm, 0);
4561 for (i = 0; i < len; i++)
4562 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4563 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4564 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4565 == passed_mode)
4566 && XINT (XEXP (XVECEXP (entry_parm, 0, i), 1), 0) == 0)
4568 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4569 DECL_INCOMING_RTL (parm) = entry_parm;
4570 break;
4574 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4575 in the mode in which it arrives.
4576 STACK_PARM is an RTX for a stack slot where the parameter can live
4577 during the function (in case we want to put it there).
4578 STACK_PARM is 0 if no stack slot was pushed for it.
4580 Now output code if necessary to convert ENTRY_PARM to
4581 the type in which this function declares it,
4582 and store that result in an appropriate place,
4583 which may be a pseudo reg, may be STACK_PARM,
4584 or may be a local stack slot if STACK_PARM is 0.
4586 Set DECL_RTL to that place. */
4588 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4590 /* If a BLKmode arrives in registers, copy it to a stack slot.
4591 Handle calls that pass values in multiple non-contiguous
4592 locations. The Irix 6 ABI has examples of this. */
4593 if (GET_CODE (entry_parm) == REG
4594 || GET_CODE (entry_parm) == PARALLEL)
4596 int size_stored
4597 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4598 UNITS_PER_WORD);
4600 /* Note that we will be storing an integral number of words.
4601 So we have to be careful to ensure that we allocate an
4602 integral number of words. We do this below in the
4603 assign_stack_local if space was not allocated in the argument
4604 list. If it was, this will not work if PARM_BOUNDARY is not
4605 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4606 if it becomes a problem. */
4608 if (stack_parm == 0)
4610 stack_parm
4611 = assign_stack_local (GET_MODE (entry_parm),
4612 size_stored, 0);
4613 set_mem_attributes (stack_parm, parm, 1);
4616 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4617 abort ();
4619 /* Handle calls that pass values in multiple non-contiguous
4620 locations. The Irix 6 ABI has examples of this. */
4621 if (GET_CODE (entry_parm) == PARALLEL)
4622 emit_group_store (validize_mem (stack_parm), entry_parm,
4623 int_size_in_bytes (TREE_TYPE (parm)),
4624 TYPE_ALIGN (TREE_TYPE (parm)));
4625 else
4626 move_block_from_reg (REGNO (entry_parm),
4627 validize_mem (stack_parm),
4628 size_stored / UNITS_PER_WORD,
4629 int_size_in_bytes (TREE_TYPE (parm)));
4631 DECL_RTL (parm) = stack_parm;
4633 else if (! ((! optimize
4634 && ! DECL_REGISTER (parm)
4635 && ! DECL_INLINE (fndecl))
4636 || TREE_SIDE_EFFECTS (parm)
4637 /* If -ffloat-store specified, don't put explicit
4638 float variables into registers. */
4639 || (flag_float_store
4640 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4641 /* Always assign pseudo to structure return or item passed
4642 by invisible reference. */
4643 || passed_pointer || parm == function_result_decl)
4645 /* Store the parm in a pseudoregister during the function, but we
4646 may need to do it in a wider mode. */
4648 register rtx parmreg;
4649 unsigned int regno, regnoi = 0, regnor = 0;
4651 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4653 promoted_nominal_mode
4654 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4656 parmreg = gen_reg_rtx (promoted_nominal_mode);
4657 mark_user_reg (parmreg);
4659 /* If this was an item that we received a pointer to, set DECL_RTL
4660 appropriately. */
4661 if (passed_pointer)
4663 DECL_RTL (parm)
4664 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4665 set_mem_attributes (DECL_RTL (parm), parm, 1);
4667 else
4668 DECL_RTL (parm) = parmreg;
4670 /* Copy the value into the register. */
4671 if (nominal_mode != passed_mode
4672 || promoted_nominal_mode != promoted_mode)
4674 int save_tree_used;
4675 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4676 mode, by the caller. We now have to convert it to
4677 NOMINAL_MODE, if different. However, PARMREG may be in
4678 a different mode than NOMINAL_MODE if it is being stored
4679 promoted.
4681 If ENTRY_PARM is a hard register, it might be in a register
4682 not valid for operating in its mode (e.g., an odd-numbered
4683 register for a DFmode). In that case, moves are the only
4684 thing valid, so we can't do a convert from there. This
4685	     occurs when the calling sequence allows such misaligned
4686 usages.
4688 In addition, the conversion may involve a call, which could
4689 clobber parameters which haven't been copied to pseudo
4690 registers yet. Therefore, we must first copy the parm to
4691 a pseudo reg here, and save the conversion until after all
4692 parameters have been moved. */
4694 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4696 emit_move_insn (tempreg, validize_mem (entry_parm));
4698 push_to_sequence (conversion_insns);
4699 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4701 /* TREE_USED gets set erroneously during expand_assignment. */
4702 save_tree_used = TREE_USED (parm);
4703 expand_assignment (parm,
4704 make_tree (nominal_type, tempreg), 0, 0);
4705 TREE_USED (parm) = save_tree_used;
4706 conversion_insns = get_insns ();
4707 did_conversion = 1;
4708 end_sequence ();
4710 else
4711 emit_move_insn (parmreg, validize_mem (entry_parm));
4713 /* If we were passed a pointer but the actual value
4714 can safely live in a register, put it in one. */
4715 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4716 && ! ((! optimize
4717 && ! DECL_REGISTER (parm)
4718 && ! DECL_INLINE (fndecl))
4719 || TREE_SIDE_EFFECTS (parm)
4720 /* If -ffloat-store specified, don't put explicit
4721 float variables into registers. */
4722 || (flag_float_store
4723 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4725 /* We can't use nominal_mode, because it will have been set to
4726 Pmode above. We must use the actual mode of the parm. */
4727 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4728 mark_user_reg (parmreg);
4729 emit_move_insn (parmreg, DECL_RTL (parm));
4730 DECL_RTL (parm) = parmreg;
4731 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4732 now the parm. */
4733 stack_parm = 0;
4735 #ifdef FUNCTION_ARG_CALLEE_COPIES
4736 /* If we are passed an arg by reference and it is our responsibility
4737 to make a copy, do it now.
4738	 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4739 original argument, so we must recreate them in the call to
4740 FUNCTION_ARG_CALLEE_COPIES. */
4741 /* ??? Later add code to handle the case that if the argument isn't
4742 modified, don't do the copy. */
4744 else if (passed_pointer
4745 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4746 TYPE_MODE (DECL_ARG_TYPE (parm)),
4747 DECL_ARG_TYPE (parm),
4748 named_arg)
4749 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4751 rtx copy;
4752 tree type = DECL_ARG_TYPE (parm);
4754 /* This sequence may involve a library call perhaps clobbering
4755 registers that haven't been copied to pseudos yet. */
4757 push_to_sequence (conversion_insns);
4759 if (!COMPLETE_TYPE_P (type)
4760 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4761 /* This is a variable sized object. */
4762 copy = gen_rtx_MEM (BLKmode,
4763 allocate_dynamic_stack_space
4764 (expr_size (parm), NULL_RTX,
4765 TYPE_ALIGN (type)));
4766 else
4767 copy = assign_stack_temp (TYPE_MODE (type),
4768 int_size_in_bytes (type), 1);
4769 set_mem_attributes (copy, parm, 1);
4771 store_expr (parm, copy, 0);
4772 emit_move_insn (parmreg, XEXP (copy, 0));
4773 if (current_function_check_memory_usage)
4774 emit_library_call (chkr_set_right_libfunc,
4775 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4776 XEXP (copy, 0), Pmode,
4777 GEN_INT (int_size_in_bytes (type)),
4778 TYPE_MODE (sizetype),
4779 GEN_INT (MEMORY_USE_RW),
4780 TYPE_MODE (integer_type_node));
4781 conversion_insns = get_insns ();
4782 did_conversion = 1;
4783 end_sequence ();
4785 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4787 /* In any case, record the parm's desired stack location
4788 in case we later discover it must live in the stack.
4790 If it is a COMPLEX value, store the stack location for both
4791 halves. */
4793 if (GET_CODE (parmreg) == CONCAT)
4794 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4795 else
4796 regno = REGNO (parmreg);
4798 if (regno >= max_parm_reg)
4800 rtx *new;
4801 int old_max_parm_reg = max_parm_reg;
4803 /* It's slow to expand this one register at a time,
4804 but it's also rare and we need max_parm_reg to be
4805 precisely correct. */
4806 max_parm_reg = regno + 1;
4807 new = (rtx *) xrealloc (parm_reg_stack_loc,
4808 max_parm_reg * sizeof (rtx));
4809 memset ((char *) (new + old_max_parm_reg), 0,
4810 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4811 parm_reg_stack_loc = new;
4814 if (GET_CODE (parmreg) == CONCAT)
4816 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4818 regnor = REGNO (gen_realpart (submode, parmreg));
4819 regnoi = REGNO (gen_imagpart (submode, parmreg));
4821 if (stack_parm != 0)
4823 parm_reg_stack_loc[regnor]
4824 = gen_realpart (submode, stack_parm);
4825 parm_reg_stack_loc[regnoi]
4826 = gen_imagpart (submode, stack_parm);
4828 else
4830 parm_reg_stack_loc[regnor] = 0;
4831 parm_reg_stack_loc[regnoi] = 0;
4834 else
4835 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4837 /* Mark the register as eliminable if we did no conversion
4838 and it was copied from memory at a fixed offset,
4839 and the arg pointer was not copied to a pseudo-reg.
4840 If the arg pointer is a pseudo reg or the offset formed
4841 an invalid address, such memory-equivalences
4842 as we make here would screw up life analysis for it. */
4843 if (nominal_mode == passed_mode
4844 && ! did_conversion
4845 && stack_parm != 0
4846 && GET_CODE (stack_parm) == MEM
4847 && stack_offset.var == 0
4848 && reg_mentioned_p (virtual_incoming_args_rtx,
4849 XEXP (stack_parm, 0)))
4851 rtx linsn = get_last_insn ();
4852 rtx sinsn, set;
4854 /* Mark complex types separately. */
4855 if (GET_CODE (parmreg) == CONCAT)
4856 /* Scan backwards for the set of the real and
4857 imaginary parts. */
4858 for (sinsn = linsn; sinsn != 0;
4859 sinsn = prev_nonnote_insn (sinsn))
4861 set = single_set (sinsn);
4862 if (set != 0
4863 && SET_DEST (set) == regno_reg_rtx [regnoi])
4864 REG_NOTES (sinsn)
4865 = gen_rtx_EXPR_LIST (REG_EQUIV,
4866 parm_reg_stack_loc[regnoi],
4867 REG_NOTES (sinsn));
4868 else if (set != 0
4869 && SET_DEST (set) == regno_reg_rtx [regnor])
4870 REG_NOTES (sinsn)
4871 = gen_rtx_EXPR_LIST (REG_EQUIV,
4872 parm_reg_stack_loc[regnor],
4873 REG_NOTES (sinsn));
4875 else if ((set = single_set (linsn)) != 0
4876 && SET_DEST (set) == parmreg)
4877 REG_NOTES (linsn)
4878 = gen_rtx_EXPR_LIST (REG_EQUIV,
4879 stack_parm, REG_NOTES (linsn));
4882 /* For pointer data type, suggest pointer register. */
4883 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4884 mark_reg_pointer (parmreg,
4885 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4887 /* If something wants our address, try to use ADDRESSOF. */
4888 if (TREE_ADDRESSABLE (parm))
4890 /* If we end up putting something into the stack,
4891 fixup_var_refs_insns will need to make a pass over
4892	     all the instructions.  It looks through the pending
4893 sequences -- but it can't see the ones in the
4894 CONVERSION_INSNS, if they're not on the sequence
4895 stack. So, we go back to that sequence, just so that
4896 the fixups will happen. */
4897 push_to_sequence (conversion_insns);
4898 put_var_into_stack (parm);
4899 conversion_insns = get_insns ();
4900 end_sequence ();
4903 else
4905 /* Value must be stored in the stack slot STACK_PARM
4906 during function execution. */
4908 if (promoted_mode != nominal_mode)
4910 /* Conversion is required. */
4911 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4913 emit_move_insn (tempreg, validize_mem (entry_parm));
4915 push_to_sequence (conversion_insns);
4916 entry_parm = convert_to_mode (nominal_mode, tempreg,
4917 TREE_UNSIGNED (TREE_TYPE (parm)));
4918 if (stack_parm)
4920 /* ??? This may need a big-endian conversion on sparc64. */
4921 stack_parm = change_address (stack_parm, nominal_mode,
4922 NULL_RTX);
4924 conversion_insns = get_insns ();
4925 did_conversion = 1;
4926 end_sequence ();
4929 if (entry_parm != stack_parm)
4931 if (stack_parm == 0)
4933 stack_parm
4934 = assign_stack_local (GET_MODE (entry_parm),
4935 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4936 set_mem_attributes (stack_parm, parm, 1);
4939 if (promoted_mode != nominal_mode)
4941 push_to_sequence (conversion_insns);
4942 emit_move_insn (validize_mem (stack_parm),
4943 validize_mem (entry_parm));
4944 conversion_insns = get_insns ();
4945 end_sequence ();
4947 else
4948 emit_move_insn (validize_mem (stack_parm),
4949 validize_mem (entry_parm));
4951 if (current_function_check_memory_usage)
4953 push_to_sequence (conversion_insns);
4954 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
4955 VOIDmode, 3, XEXP (stack_parm, 0), Pmode,
4956 GEN_INT (GET_MODE_SIZE (GET_MODE
4957 (entry_parm))),
4958 TYPE_MODE (sizetype),
4959 GEN_INT (MEMORY_USE_RW),
4960 TYPE_MODE (integer_type_node));
4962 conversion_insns = get_insns ();
4963 end_sequence ();
4965 DECL_RTL (parm) = stack_parm;
4968 /* If this "parameter" was the place where we are receiving the
4969 function's incoming structure pointer, set up the result. */
4970 if (parm == function_result_decl)
4972 tree result = DECL_RESULT (fndecl);
4974 DECL_RTL (result)
4975 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4977 set_mem_attributes (DECL_RTL (result), result, 1);
4981 /* Output all parameter conversion instructions (possibly including calls)
4982 now that all parameters have been copied out of hard registers. */
4983 emit_insns (conversion_insns);
4985 last_parm_insn = get_last_insn ();
4987 current_function_args_size = stack_args_size.constant;
4989 /* Adjust function incoming argument size for alignment and
4990 minimum length. */
4992 #ifdef REG_PARM_STACK_SPACE
4993 #ifndef MAYBE_REG_PARM_STACK_SPACE
4994 current_function_args_size = MAX (current_function_args_size,
4995 REG_PARM_STACK_SPACE (fndecl));
4996 #endif
4997 #endif
4999 #ifdef STACK_BOUNDARY
5000 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5002 current_function_args_size
5003 = ((current_function_args_size + STACK_BYTES - 1)
5004 / STACK_BYTES) * STACK_BYTES;
5005 #endif
5007 #ifdef ARGS_GROW_DOWNWARD
5008 current_function_arg_offset_rtx
5009 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5010 : expand_expr (size_diffop (stack_args_size.var,
5011 size_int (-stack_args_size.constant)),
5012 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5013 #else
5014 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5015 #endif
5017 /* See how many bytes, if any, of its args a function should try to pop
5018 on return. */
5020 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5021 current_function_args_size);
5023   /* For a stdarg.h function, save info about
5024 regs and stack space used by the named args. */
5026 if (!hide_last_arg)
5027 current_function_args_info = args_so_far;
5029 /* Set the rtx used for the function return value. Put this in its
5030 own variable so any optimizers that need this information don't have
5031 to include tree.h. Do this here so it gets done when an inlined
5032 function gets output. */
5034 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5037 /* Indicate whether REGNO is an incoming argument to the current function
5038 that was promoted to a wider mode. If so, return the RTX for the
5039 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5040 that REGNO is promoted from and whether the promotion was signed or
5041 unsigned. */
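/* For example, with PROMOTE_FUNCTION_ARGS in effect a `short'
   parameter may arrive sign-extended in a full-word register; for
   that register this function returns the incoming REG and sets
   *PMODE to HImode and *PUNSIGNEDP to zero.  */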
5043 #ifdef PROMOTE_FUNCTION_ARGS
5046 promoted_input_arg (regno, pmode, punsignedp)
5047 unsigned int regno;
5048 enum machine_mode *pmode;
5049 int *punsignedp;
5051 tree arg;
5053 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5054 arg = TREE_CHAIN (arg))
5055 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5056 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5057 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5059 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5060 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5062 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5063 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5064 && mode != DECL_MODE (arg))
5066 *pmode = DECL_MODE (arg);
5067 *punsignedp = unsignedp;
5068 return DECL_INCOMING_RTL (arg);
5072 return 0;
5075 #endif
5077 /* Compute the size and offset from the start of the stacked arguments for a
5078 parm passed in mode PASSED_MODE and with type TYPE.
5080 INITIAL_OFFSET_PTR points to the current offset into the stacked
5081 arguments.
5083 The starting offset and size for this parm are returned in *OFFSET_PTR
5084 and *ARG_SIZE_PTR, respectively.
5086 IN_REGS is non-zero if the argument will be passed in registers. It will
5087 never be set if REG_PARM_STACK_SPACE is not defined.
5089 FNDECL is the function in which the argument was defined.
5091 There are two types of rounding that are done. The first, controlled by
5092 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5093 list to be aligned to the specific boundary (in bits). This rounding
5094 affects the initial and starting offsets, but not the argument size.
5096 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5097 optionally rounds the size of the parm to PARM_BOUNDARY. The
5098 initial offset is not affected by this rounding, while the size always
5099 is and the starting offset may be. */
5101 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5102 initial_offset_ptr is positive because locate_and_pad_parm's
5103 callers pass in the total size of args so far as
5104    initial_offset_ptr.  arg_size_ptr is always positive.  */
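/* A worked (hypothetical) example: with PARM_BOUNDARY == 32, a DImode
   parm whose FUNCTION_ARG_BOUNDARY is 64, and a running offset of 20
   bytes, the offset is first padded to 24 (the 4 bytes of padding
   being what ALIGNMENT_PAD reports on targets where 64 also exceeds
   STACK_BOUNDARY), and the 8-byte argument then leaves the running
   offset at 32.  */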
5106 void
5107 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5108 initial_offset_ptr, offset_ptr, arg_size_ptr,
5109 alignment_pad)
5110 enum machine_mode passed_mode;
5111 tree type;
5112 int in_regs ATTRIBUTE_UNUSED;
5113 tree fndecl ATTRIBUTE_UNUSED;
5114 struct args_size *initial_offset_ptr;
5115 struct args_size *offset_ptr;
5116 struct args_size *arg_size_ptr;
5117 struct args_size *alignment_pad;
5120 tree sizetree
5121 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5122 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5123 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5125 #ifdef REG_PARM_STACK_SPACE
5126 /* If we have found a stack parm before we reach the end of the
5127 area reserved for registers, skip that area. */
5128 if (! in_regs)
5130 int reg_parm_stack_space = 0;
5132 #ifdef MAYBE_REG_PARM_STACK_SPACE
5133 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5134 #else
5135 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5136 #endif
5137 if (reg_parm_stack_space > 0)
5139 if (initial_offset_ptr->var)
5141 initial_offset_ptr->var
5142 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5143 ssize_int (reg_parm_stack_space));
5144 initial_offset_ptr->constant = 0;
5146 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5147 initial_offset_ptr->constant = reg_parm_stack_space;
5150 #endif /* REG_PARM_STACK_SPACE */
5152 arg_size_ptr->var = 0;
5153 arg_size_ptr->constant = 0;
5154 alignment_pad->var = 0;
5155 alignment_pad->constant = 0;
5157 #ifdef ARGS_GROW_DOWNWARD
5158 if (initial_offset_ptr->var)
5160 offset_ptr->constant = 0;
5161 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5162 initial_offset_ptr->var);
5164 else
5166 offset_ptr->constant = -initial_offset_ptr->constant;
5167 offset_ptr->var = 0;
5169 if (where_pad != none
5170 && (!host_integerp (sizetree, 1)
5171 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5172 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5173 SUB_PARM_SIZE (*offset_ptr, sizetree);
5174 if (where_pad != downward)
5175 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5176 if (initial_offset_ptr->var)
5177 arg_size_ptr->var = size_binop (MINUS_EXPR,
5178 size_binop (MINUS_EXPR,
5179 ssize_int (0),
5180 initial_offset_ptr->var),
5181 offset_ptr->var);
5183 else
5184 arg_size_ptr->constant = (-initial_offset_ptr->constant
5185 - offset_ptr->constant);
5187 #else /* !ARGS_GROW_DOWNWARD */
5188 if (!in_regs
5189 #ifdef REG_PARM_STACK_SPACE
5190 || REG_PARM_STACK_SPACE (fndecl) > 0
5191 #endif
5193 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5194 *offset_ptr = *initial_offset_ptr;
5196 #ifdef PUSH_ROUNDING
5197 if (passed_mode != BLKmode)
5198 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5199 #endif
5201 /* Pad_below needs the pre-rounded size to know how much to pad below
5202 so this must be done before rounding up. */
5203 if (where_pad == downward
5204 /* However, BLKmode args passed in regs have their padding done elsewhere.
5205 The stack slot must be able to hold the entire register. */
5206 && !(in_regs && passed_mode == BLKmode))
5207 pad_below (offset_ptr, passed_mode, sizetree);
5209 if (where_pad != none
5210 && (!host_integerp (sizetree, 1)
5211 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5212 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5214 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5215 #endif /* ARGS_GROW_DOWNWARD */
5218 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5219 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
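/* E.g. a constant offset of 13 rounded to a 64-bit (8-byte) boundary
   becomes 16 where args grow upward (CEIL_ROUND) but 8 where they
   grow downward (FLOOR_ROUND).  */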
5221 static void
5222 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5223 struct args_size *offset_ptr;
5224 int boundary;
5225 struct args_size *alignment_pad;
5227 tree save_var = NULL_TREE;
5228 HOST_WIDE_INT save_constant = 0;
5230 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5232 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5234 save_var = offset_ptr->var;
5235 save_constant = offset_ptr->constant;
5238 alignment_pad->var = NULL_TREE;
5239 alignment_pad->constant = 0;
5241 if (boundary > BITS_PER_UNIT)
5243 if (offset_ptr->var)
5245 offset_ptr->var =
5246 #ifdef ARGS_GROW_DOWNWARD
5247 round_down
5248 #else
5249 round_up
5250 #endif
5251 (ARGS_SIZE_TREE (*offset_ptr),
5252 boundary / BITS_PER_UNIT);
5253 offset_ptr->constant = 0; /*?*/
5254 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5255 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5256 save_var);
5258 else
5260 offset_ptr->constant =
5261 #ifdef ARGS_GROW_DOWNWARD
5262 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5263 #else
5264 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5265 #endif
5266 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5267 alignment_pad->constant = offset_ptr->constant - save_constant;
5272 #ifndef ARGS_GROW_DOWNWARD
5273 static void
5274 pad_below (offset_ptr, passed_mode, sizetree)
5275 struct args_size *offset_ptr;
5276 enum machine_mode passed_mode;
5277 tree sizetree;
5279 if (passed_mode != BLKmode)
5281 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5282 offset_ptr->constant
5283 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5284 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5285 - GET_MODE_SIZE (passed_mode));
5287 else
5289 if (TREE_CODE (sizetree) != INTEGER_CST
5290 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5292 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5293 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5294 /* Add it in. */
5295 ADD_PARM_SIZE (*offset_ptr, s2);
5296 SUB_PARM_SIZE (*offset_ptr, sizetree);
5300 #endif
5302 /* Walk the tree of blocks describing the binding levels within a function
5303 and warn about uninitialized variables.
5304 This is done after calling flow_analysis and before global_alloc
5305 clobbers the pseudo-regs to hard regs. */
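/* The classic case this catches is

	int f (int x) { int y; if (x) y = 1; return y; }

   where flow analysis finds a path on which Y's pseudo is used before
   ever being set.  */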
5307 void
5308 uninitialized_vars_warning (block)
5309 tree block;
5311 register tree decl, sub;
5312 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5314 if (warn_uninitialized
5315 && TREE_CODE (decl) == VAR_DECL
5316	/* These warnings are unreliable for aggregates
5317 because assigning the fields one by one can fail to convince
5318 flow.c that the entire aggregate was initialized.
5319 Unions are troublesome because members may be shorter. */
5320 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5321 && DECL_RTL (decl) != 0
5322 && GET_CODE (DECL_RTL (decl)) == REG
5323 /* Global optimizations can make it difficult to determine if a
5324 particular variable has been initialized. However, a VAR_DECL
5325 with a nonzero DECL_INITIAL had an initializer, so do not
5326 claim it is potentially uninitialized.
5328 We do not care about the actual value in DECL_INITIAL, so we do
5329 not worry that it may be a dangling pointer. */
5330 && DECL_INITIAL (decl) == NULL_TREE
5331 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5332 warning_with_decl (decl,
5333 "`%s' might be used uninitialized in this function");
5334 if (extra_warnings
5335 && TREE_CODE (decl) == VAR_DECL
5336 && DECL_RTL (decl) != 0
5337 && GET_CODE (DECL_RTL (decl)) == REG
5338 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5339 warning_with_decl (decl,
5340 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5342 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5343 uninitialized_vars_warning (sub);
5346 /* Do the appropriate part of uninitialized_vars_warning
5347 but for arguments instead of local variables. */
5349 void
5350 setjmp_args_warning ()
5352 register tree decl;
5353 for (decl = DECL_ARGUMENTS (current_function_decl);
5354 decl; decl = TREE_CHAIN (decl))
5355 if (DECL_RTL (decl) != 0
5356 && GET_CODE (DECL_RTL (decl)) == REG
5357 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5358 warning_with_decl (decl,
5359 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5362 /* If this function calls setjmp, put all vars into the stack
5363 unless they were declared `register'. */
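/* For example, in

	jmp_buf env;
	int f (void) { int v = 0; if (setjmp (env)) return v; ... }

   V must live in memory: after a longjmp back, a register copy of V
   could hold whatever the intervening code left there.  */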
5365 void
5366 setjmp_protect (block)
5367 tree block;
5369 register tree decl, sub;
5370 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5371 if ((TREE_CODE (decl) == VAR_DECL
5372 || TREE_CODE (decl) == PARM_DECL)
5373 && DECL_RTL (decl) != 0
5374 && (GET_CODE (DECL_RTL (decl)) == REG
5375 || (GET_CODE (DECL_RTL (decl)) == MEM
5376 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5377 /* If this variable came from an inline function, it must be
5378 that its life doesn't overlap the setjmp. If there was a
5379 setjmp in the function, it would already be in memory. We
5380	   must exclude such variables because their DECL_RTL might be
5381 set to strange things such as virtual_stack_vars_rtx. */
5382 && ! DECL_FROM_INLINE (decl)
5383 && (
5384 #ifdef NON_SAVING_SETJMP
5385 /* If longjmp doesn't restore the registers,
5386 don't put anything in them. */
5387 NON_SAVING_SETJMP
5389 #endif
5390 ! DECL_REGISTER (decl)))
5391 put_var_into_stack (decl);
5392 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5393 setjmp_protect (sub);
5396 /* Like the previous function, but for args instead of local variables. */
5398 void
5399 setjmp_protect_args ()
5401 register tree decl;
5402 for (decl = DECL_ARGUMENTS (current_function_decl);
5403 decl; decl = TREE_CHAIN (decl))
5404 if ((TREE_CODE (decl) == VAR_DECL
5405 || TREE_CODE (decl) == PARM_DECL)
5406 && DECL_RTL (decl) != 0
5407 && (GET_CODE (DECL_RTL (decl)) == REG
5408 || (GET_CODE (DECL_RTL (decl)) == MEM
5409 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5410 && (
5411 /* If longjmp doesn't restore the registers,
5412 don't put anything in them. */
5413 #ifdef NON_SAVING_SETJMP
5414 NON_SAVING_SETJMP
5416 #endif
5417 ! DECL_REGISTER (decl)))
5418 put_var_into_stack (decl);
5421 /* Return the context-pointer register corresponding to DECL,
5422 or 0 if it does not need one. */
5425 lookup_static_chain (decl)
5426 tree decl;
5428 tree context = decl_function_context (decl);
5429 tree link;
5431 if (context == 0
5432 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5433 return 0;
5435 /* We treat inline_function_decl as an alias for the current function
5436 because that is the inline function whose vars, types, etc.
5437 are being merged into the current function.
5438 See expand_inline_function. */
5439 if (context == current_function_decl || context == inline_function_decl)
5440 return virtual_stack_vars_rtx;
5442 for (link = context_display; link; link = TREE_CHAIN (link))
5443 if (TREE_PURPOSE (link) == context)
5444 return RTL_EXPR_RTL (TREE_VALUE (link));
5446 abort ();
5449 /* Convert a stack slot address ADDR for variable VAR
5450 (from a containing function)
5451 into an address valid in this function (using a static chain). */
5453 rtx
5454 fix_lexical_addr (addr, var)
5455 rtx addr;
5456 tree var;
5457 {
5458 rtx basereg;
5459 HOST_WIDE_INT displacement;
5460 tree context = decl_function_context (var);
5461 struct function *fp;
5462 rtx base = 0;
5464 /* If this is the present function, we need not do anything. */
5465 if (context == current_function_decl || context == inline_function_decl)
5466 return addr;
5468 for (fp = outer_function_chain; fp; fp = fp->next)
5469 if (fp->decl == context)
5470 break;
5472 if (fp == 0)
5473 abort ();
5475 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5476 addr = XEXP (XEXP (addr, 0), 0);
5478 /* Decode given address as base reg plus displacement. */
5479 if (GET_CODE (addr) == REG)
5480 basereg = addr, displacement = 0;
5481 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5482 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5483 else
5484 abort ();
5486 /* We accept vars reached via the containing function's
5487 incoming arg pointer and via its stack variables pointer. */
5488 if (basereg == fp->internal_arg_pointer)
5489 {
5490 /* If reached via arg pointer, get the arg pointer value
5491 out of that function's stack frame.
5493 There are two cases: If a separate ap is needed, allocate a
5494 slot in the outer function for it and dereference it that way.
5495 This is correct even if the real ap is actually a pseudo.
5496 Otherwise, just adjust the offset from the frame pointer to
5497 compensate. */
5499 #ifdef NEED_SEPARATE_AP
5500 rtx addr;
5502 if (fp->x_arg_pointer_save_area == 0)
5503 fp->x_arg_pointer_save_area
5504 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5506 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5507 addr = memory_address (Pmode, addr);
5509 base = gen_rtx_MEM (Pmode, addr);
5510 MEM_ALIAS_SET (base) = get_frame_alias_set ();
5511 base = copy_to_reg (base);
5512 #else
5513 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5514 base = lookup_static_chain (var);
5515 #endif
5516 }
5518 else if (basereg == virtual_stack_vars_rtx)
5519 {
5520 /* This is the same code as lookup_static_chain, duplicated here to
5521 avoid an extra call to decl_function_context. */
5522 tree link;
5524 for (link = context_display; link; link = TREE_CHAIN (link))
5525 if (TREE_PURPOSE (link) == context)
5526 {
5527 base = RTL_EXPR_RTL (TREE_VALUE (link));
5528 break;
5529 }
5530 }
5532 if (base == 0)
5533 abort ();
5535 /* Use same offset, relative to appropriate static chain or argument
5536 pointer. */
5537 return plus_constant (base, displacement);
5538 }
5540 /* Return the address of the trampoline for entering nested fn FUNCTION.
5541 If necessary, allocate a trampoline (in the stack frame)
5542 and emit rtl to initialize its contents (at entry to this function). */
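/* Illustrative source (apply is a hypothetical callee):
     int f (int i) { int g (int j) { return i + j; } return apply (g); }
   Taking g's address forces a trampoline in f's frame that loads f's
   static chain and then jumps to g's real code.  */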
5544 rtx
5545 trampoline_address (function)
5546 tree function;
5547 {
5548 tree link;
5549 tree rtlexp;
5550 rtx tramp;
5551 struct function *fp;
5552 tree fn_context;
5554 /* Find an existing trampoline and return it. */
5555 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5556 if (TREE_PURPOSE (link) == function)
5557 return
5558 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5560 for (fp = outer_function_chain; fp; fp = fp->next)
5561 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5562 if (TREE_PURPOSE (link) == function)
5563 {
5564 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5565 function);
5566 return adjust_trampoline_addr (tramp);
5567 }
5569 /* None exists; we must make one. */
5571 /* Find the `struct function' for the function containing FUNCTION. */
5572 fp = 0;
5573 fn_context = decl_function_context (function);
5574 if (fn_context != current_function_decl
5575 && fn_context != inline_function_decl)
5576 for (fp = outer_function_chain; fp; fp = fp->next)
5577 if (fp->decl == fn_context)
5578 break;
5580 /* Allocate run-time space for this trampoline
5581 (usually in the defining function's stack frame). */
5582 #ifdef ALLOCATE_TRAMPOLINE
5583 tramp = ALLOCATE_TRAMPOLINE (fp);
5584 #else
5585 /* If rounding needed, allocate extra space
5586 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5587 #ifdef TRAMPOLINE_ALIGNMENT
5588 #define TRAMPOLINE_REAL_SIZE \
5589 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5590 #else
5591 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5592 #endif
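/* E.g. (hypothetical values) with TRAMPOLINE_SIZE == 10 and a 32-bit
   TRAMPOLINE_ALIGNMENT, TRAMPOLINE_REAL_SIZE is 10 + 4 - 1 == 13 bytes,
   enough for round_trampoline_addr to find an aligned 10-byte region.  */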
5593 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5594 fp ? fp : cfun);
5595 #endif
5597 /* Record the trampoline for reuse and note it for later initialization
5598 by expand_function_end. */
5599 if (fp != 0)
5600 {
5601 rtlexp = make_node (RTL_EXPR);
5602 RTL_EXPR_RTL (rtlexp) = tramp;
5603 fp->x_trampoline_list = tree_cons (function, rtlexp,
5604 fp->x_trampoline_list);
5605 }
5606 else
5607 {
5608 /* Make the RTL_EXPR node temporary, not momentary, so that the
5609 trampoline_list doesn't become garbage. */
5610 rtlexp = make_node (RTL_EXPR);
5612 RTL_EXPR_RTL (rtlexp) = tramp;
5613 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5614 }
5616 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5617 return adjust_trampoline_addr (tramp);
5618 }
5620 /* Given a trampoline address,
5621 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
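/* Worked case (hypothetical values): with TRAMPOLINE_ALIGNMENT == 32 and
   BITS_PER_UNIT == 8, the body below computes (tramp + 3) & -4, so an
   address of 0x1005 rounds up to 0x1008.  */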
5623 static rtx
5624 round_trampoline_addr (tramp)
5625 rtx tramp;
5626 {
5627 #ifdef TRAMPOLINE_ALIGNMENT
5628 /* Round address up to desired boundary. */
5629 rtx temp = gen_reg_rtx (Pmode);
5630 temp = expand_binop (Pmode, add_optab, tramp,
5631 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5632 temp, 0, OPTAB_LIB_WIDEN);
5633 tramp = expand_binop (Pmode, and_optab, temp,
5634 GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5635 temp, 0, OPTAB_LIB_WIDEN);
5636 #endif
5637 return tramp;
5638 }
5640 /* Given a trampoline address, round it and then apply any
5641 platform-specific adjustments so that the result can be used for a
5642 function call. */
5644 static rtx
5645 adjust_trampoline_addr (tramp)
5646 rtx tramp;
5647 {
5648 tramp = round_trampoline_addr (tramp);
5649 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5650 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5651 #endif
5652 return tramp;
5653 }
5655 /* Put all this function's BLOCK nodes including those that are chained
5656 onto the first block into a vector, and return it.
5657 Also store in each NOTE for the beginning or end of a block
5658 the index of that block in the vector.
5659 The blocks are taken from DECL_INITIAL (current_function_decl) and
5660 the notes from the current insn chain. */
5662 void
5663 identify_blocks ()
5664 {
5665 int n_blocks;
5666 tree *block_vector, *last_block_vector;
5667 tree *block_stack;
5668 tree block = DECL_INITIAL (current_function_decl);
5670 if (block == 0)
5671 return;
5673 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5674 depth-first order. */
5675 block_vector = get_block_vector (block, &n_blocks);
5676 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5678 last_block_vector = identify_blocks_1 (get_insns (),
5679 block_vector + 1,
5680 block_vector + n_blocks,
5681 block_stack);
5683 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5684 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5685 if (0 && last_block_vector != block_vector + n_blocks)
5686 abort ();
5688 free (block_vector);
5689 free (block_stack);
5690 }
5692 /* Subroutine of identify_blocks. Do the block substitution on the
5693 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5695 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5696 BLOCK_VECTOR is incremented for each block seen. */
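/* Sketch (illustrative): for the note sequence
     BEG A ... BEG B ... END B ... END A
   the walk below hands out blocks from BLOCK_VECTOR in depth-first
   order (A, then B) at each BEG, and each END pops the innermost open
   block off BLOCK_STACK into its NOTE_BLOCK.  */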
5698 static tree *
5699 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5700 rtx insns;
5701 tree *block_vector;
5702 tree *end_block_vector;
5703 tree *orig_block_stack;
5704 {
5705 rtx insn;
5706 tree *block_stack = orig_block_stack;
5708 for (insn = insns; insn; insn = NEXT_INSN (insn))
5709 {
5710 if (GET_CODE (insn) == NOTE)
5711 {
5712 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5713 {
5714 tree b;
5716 /* If there are more block notes than BLOCKs, something
5717 is badly wrong. */
5718 if (block_vector == end_block_vector)
5719 abort ();
5721 b = *block_vector++;
5722 NOTE_BLOCK (insn) = b;
5723 *block_stack++ = b;
5724 }
5725 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5726 {
5727 /* If there are more NOTE_INSN_BLOCK_ENDs than
5728 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5729 if (block_stack == orig_block_stack)
5730 abort ();
5732 NOTE_BLOCK (insn) = *--block_stack;
5733 }
5734 }
5735 else if (GET_CODE (insn) == CALL_INSN
5736 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5737 {
5738 rtx cp = PATTERN (insn);
5740 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5741 end_block_vector, block_stack);
5742 if (XEXP (cp, 1))
5743 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5744 end_block_vector, block_stack);
5745 if (XEXP (cp, 2))
5746 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5747 end_block_vector, block_stack);
5748 }
5749 }
5751 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5752 something is badly wrong. */
5753 if (block_stack != orig_block_stack)
5754 abort ();
5756 return block_vector;
5757 }
5759 /* Identify BLOCKs referenced by more than one
5760 NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
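/* A BLOCK can be referenced twice when, for instance, a function is
   inlined at two call sites and both copies of its insns share one
   BLOCK tree; the second reference is detected below via
   TREE_ASM_WRITTEN and gets a copied node (illustrative scenario).  */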
5762 void
5763 reorder_blocks ()
5764 {
5765 tree block = DECL_INITIAL (current_function_decl);
5766 varray_type block_stack;
5768 if (block == NULL_TREE)
5769 return;
5771 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5773 /* Prune the old trees away, so that they don't get in the way. */
5774 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5775 BLOCK_CHAIN (block) = NULL_TREE;
5777 reorder_blocks_1 (get_insns (), block, &block_stack);
5779 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5781 VARRAY_FREE (block_stack);
5782 }
5784 /* Helper function for reorder_blocks. Process the insn chain beginning
5785 at INSNS. Recurse for CALL_PLACEHOLDER insns. */
5787 static void
5788 reorder_blocks_1 (insns, current_block, p_block_stack)
5789 rtx insns;
5790 tree current_block;
5791 varray_type *p_block_stack;
5792 {
5793 rtx insn;
5795 for (insn = insns; insn; insn = NEXT_INSN (insn))
5796 {
5797 if (GET_CODE (insn) == NOTE)
5798 {
5799 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5800 {
5801 tree block = NOTE_BLOCK (insn);
5802 /* If we have seen this block before, copy it. */
5803 if (TREE_ASM_WRITTEN (block))
5804 {
5805 block = copy_node (block);
5806 NOTE_BLOCK (insn) = block;
5807 }
5808 BLOCK_SUBBLOCKS (block) = 0;
5809 TREE_ASM_WRITTEN (block) = 1;
5810 BLOCK_SUPERCONTEXT (block) = current_block;
5811 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5812 BLOCK_SUBBLOCKS (current_block) = block;
5813 current_block = block;
5814 VARRAY_PUSH_TREE (*p_block_stack, block);
5815 }
5816 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5817 {
5818 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5819 VARRAY_POP (*p_block_stack);
5820 BLOCK_SUBBLOCKS (current_block)
5821 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5822 current_block = BLOCK_SUPERCONTEXT (current_block);
5823 }
5824 }
5825 else if (GET_CODE (insn) == CALL_INSN
5826 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5827 {
5828 rtx cp = PATTERN (insn);
5829 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5830 if (XEXP (cp, 1))
5831 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5832 if (XEXP (cp, 2))
5833 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5834 }
5835 }
5836 }
5838 /* Reverse the order of elements in the chain T of blocks,
5839 and return the new head of the chain (old last element). */
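/* E.g. a chain B1 -> B2 -> B3 comes back as B3 -> B2 -> B1, with B1's
   BLOCK_CHAIN left zero (illustrative).  */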
5841 static tree
5842 blocks_nreverse (t)
5843 tree t;
5844 {
5845 register tree prev = 0, decl, next;
5846 for (decl = t; decl; decl = next)
5847 {
5848 next = BLOCK_CHAIN (decl);
5849 BLOCK_CHAIN (decl) = prev;
5850 prev = decl;
5851 }
5852 return prev;
5853 }
5855 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5856 non-NULL, list them all into VECTOR, in a depth-first preorder
5857 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5858 blocks. */
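/* E.g. (illustrative) a block A whose subblock chain is B -> C, where B
   itself has subblock D, is listed in preorder as A, B, D, C.  */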
5860 static int
5861 all_blocks (block, vector)
5862 tree block;
5863 tree *vector;
5864 {
5865 int n_blocks = 0;
5867 while (block)
5868 {
5869 TREE_ASM_WRITTEN (block) = 0;
5871 /* Record this block. */
5872 if (vector)
5873 vector[n_blocks] = block;
5875 ++n_blocks;
5877 /* Record the subblocks, and their subblocks... */
5878 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5879 vector ? vector + n_blocks : 0);
5880 block = BLOCK_CHAIN (block);
5881 }
5883 return n_blocks;
5884 }
5886 /* Return a vector containing all the blocks rooted at BLOCK. The
5887 number of elements in the vector is stored in N_BLOCKS_P. The
5888 vector is dynamically allocated; it is the caller's responsibility
5889 to call `free' on the pointer returned. */
5891 static tree *
5892 get_block_vector (block, n_blocks_p)
5893 tree block;
5894 int *n_blocks_p;
5895 {
5896 tree *block_vector;
5898 *n_blocks_p = all_blocks (block, NULL);
5899 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
5900 all_blocks (block, block_vector);
5902 return block_vector;
5903 }
5905 static int next_block_index = 2;
5907 /* Set BLOCK_NUMBER for all the blocks in FN. */
5909 void
5910 number_blocks (fn)
5911 tree fn;
5912 {
5913 int i;
5914 int n_blocks;
5915 tree *block_vector;
5917 /* For SDB and XCOFF debugging output, we start numbering the blocks
5918 from 1 within each function, rather than keeping a running
5919 count. */
5920 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
5921 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
5922 next_block_index = 1;
5923 #endif
5925 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
5927 /* The top-level BLOCK isn't numbered at all. */
5928 for (i = 1; i < n_blocks; ++i)
5929 /* We number the blocks from two. */
5930 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
5932 free (block_vector);
5934 return;
5935 }
5937 /* Allocate a function structure and reset its contents to the defaults. */
5938 static void
5939 prepare_function_start ()
5940 {
5941 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5943 init_stmt_for_function ();
5944 init_eh_for_function ();
5946 cse_not_expected = ! optimize;
5948 /* Caller save not needed yet. */
5949 caller_save_needed = 0;
5951 /* No stack slots have been made yet. */
5952 stack_slot_list = 0;
5954 current_function_has_nonlocal_label = 0;
5955 current_function_has_nonlocal_goto = 0;
5957 /* There is no stack slot for handling nonlocal gotos. */
5958 nonlocal_goto_handler_slots = 0;
5959 nonlocal_goto_stack_level = 0;
5961 /* No labels have been declared for nonlocal use. */
5962 nonlocal_labels = 0;
5963 nonlocal_goto_handler_labels = 0;
5965 /* No function calls so far in this function. */
5966 function_call_count = 0;
5968 /* No parm regs have been allocated.
5969 (This is important for output_inline_function.) */
5970 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5972 /* Initialize the RTL mechanism. */
5973 init_emit ();
5975 /* Initialize the queue of pending postincrement and postdecrements,
5976 and some other info in expr.c. */
5977 init_expr ();
5979 /* We haven't done register allocation yet. */
5980 reg_renumber = 0;
5982 init_varasm_status (cfun);
5984 /* Clear out data used for inlining. */
5985 cfun->inlinable = 0;
5986 cfun->original_decl_initial = 0;
5987 cfun->original_arg_vector = 0;
5989 #ifdef STACK_BOUNDARY
5990 cfun->stack_alignment_needed = STACK_BOUNDARY;
5991 cfun->preferred_stack_boundary = STACK_BOUNDARY;
5992 #else
5993 cfun->stack_alignment_needed = 0;
5994 cfun->preferred_stack_boundary = 0;
5995 #endif
5997 /* Set if a call to setjmp is seen. */
5998 current_function_calls_setjmp = 0;
6000 /* Set if a call to longjmp is seen. */
6001 current_function_calls_longjmp = 0;
6003 current_function_calls_alloca = 0;
6004 current_function_contains_functions = 0;
6005 current_function_is_leaf = 0;
6006 current_function_nothrow = 0;
6007 current_function_sp_is_unchanging = 0;
6008 current_function_uses_only_leaf_regs = 0;
6009 current_function_has_computed_jump = 0;
6010 current_function_is_thunk = 0;
6012 current_function_returns_pcc_struct = 0;
6013 current_function_returns_struct = 0;
6014 current_function_epilogue_delay_list = 0;
6015 current_function_uses_const_pool = 0;
6016 current_function_uses_pic_offset_table = 0;
6017 current_function_cannot_inline = 0;
6019 /* We have not yet needed to make a label to jump to for tail-recursion. */
6020 tail_recursion_label = 0;
6022 /* We haven't had a need to make a save area for ap yet. */
6023 arg_pointer_save_area = 0;
6025 /* No stack slots allocated yet. */
6026 frame_offset = 0;
6028 /* No SAVE_EXPRs in this function yet. */
6029 save_expr_regs = 0;
6031 /* No RTL_EXPRs in this function yet. */
6032 rtl_expr_chain = 0;
6034 /* Set up to allocate temporaries. */
6035 init_temp_slots ();
6037 /* Indicate that we need to distinguish between the return value of the
6038 present function and the return value of a function being called. */
6039 rtx_equal_function_value_matters = 1;
6041 /* Indicate that we have not instantiated virtual registers yet. */
6042 virtuals_instantiated = 0;
6044 /* Indicate that we want CONCATs now. */
6045 generating_concat_p = 1;
6047 /* Indicate we have no need of a frame pointer yet. */
6048 frame_pointer_needed = 0;
6050 /* By default assume not varargs or stdarg. */
6051 current_function_varargs = 0;
6052 current_function_stdarg = 0;
6054 /* We haven't made any trampolines for this function yet. */
6055 trampoline_list = 0;
6057 init_pending_stack_adjust ();
6058 inhibit_defer_pop = 0;
6060 current_function_outgoing_args_size = 0;
6062 if (init_lang_status)
6063 (*init_lang_status) (cfun);
6064 if (init_machine_status)
6065 (*init_machine_status) (cfun);
6066 }
6068 /* Initialize the rtl expansion mechanism so that we can do simple things
6069 like generate sequences. This is used to provide a context during global
6070 initialization of some passes. */
6071 void
6072 init_dummy_function_start ()
6073 {
6074 prepare_function_start ();
6075 }
6077 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6078 and initialize static variables for generating RTL for the statements
6079 of the function. */
6081 void
6082 init_function_start (subr, filename, line)
6083 tree subr;
6084 const char *filename;
6085 int line;
6086 {
6087 prepare_function_start ();
6089 /* Remember this function for later. */
6090 cfun->next_global = all_functions;
6091 all_functions = cfun;
6093 current_function_name = (*decl_printable_name) (subr, 2);
6094 cfun->decl = subr;
6096 /* Nonzero if this is a nested function that uses a static chain. */
6098 current_function_needs_context
6099 = (decl_function_context (current_function_decl) != 0
6100 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6102 /* Within function body, compute a type's size as soon as it is laid out. */
6103 immediate_size_expand++;
6105 /* Prevent ever trying to delete the first instruction of a function.
6106 Also tell final how to output a linenum before the function prologue.
6107 Note linenums could be missing, e.g. when compiling a Java .class file. */
6108 if (line > 0)
6109 emit_line_note (filename, line);
6111 /* Make sure first insn is a note even if we don't want linenums.
6112 This makes sure the first insn will never be deleted.
6113 Also, final expects a note to appear there. */
6114 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6116 /* Set flags used by final.c. */
6117 if (aggregate_value_p (DECL_RESULT (subr)))
6118 {
6119 #ifdef PCC_STATIC_STRUCT_RETURN
6120 current_function_returns_pcc_struct = 1;
6121 #endif
6122 current_function_returns_struct = 1;
6123 }
6125 /* Warn if this value is an aggregate type,
6126 regardless of which calling convention we are using for it. */
6127 if (warn_aggregate_return
6128 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6129 warning ("function returns an aggregate");
6131 current_function_returns_pointer
6132 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6133 }
6135 /* Make sure all values used by the optimization passes have sane
6136 defaults. */
6137 void
6138 init_function_for_compilation ()
6139 {
6140 reg_renumber = 0;
6142 /* No prologue/epilogue insns yet. */
6143 VARRAY_GROW (prologue, 0);
6144 VARRAY_GROW (epilogue, 0);
6145 VARRAY_GROW (sibcall_epilogue, 0);
6146 }
6148 /* Indicate that the current function uses extra args
6149 not explicitly mentioned in the argument list in any fashion. */
6151 void
6152 mark_varargs ()
6153 {
6154 current_function_varargs = 1;
6155 }
6157 /* Expand a call to __main at the beginning of a possible main function. */
6159 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6160 #undef HAS_INIT_SECTION
6161 #define HAS_INIT_SECTION
6162 #endif
6164 void
6165 expand_main_function ()
6166 {
6167 #if !defined (HAS_INIT_SECTION)
6168 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6169 VOIDmode, 0);
6170 #endif /* not HAS_INIT_SECTION */
6171 }
6173 extern struct obstack permanent_obstack;
6175 /* Start the RTL for a new function, and set variables used for
6176 emitting RTL.
6177 SUBR is the FUNCTION_DECL node.
6178 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6179 the function's parameters, which must be run at any return statement. */
6181 void
6182 expand_function_start (subr, parms_have_cleanups)
6183 tree subr;
6184 int parms_have_cleanups;
6185 {
6186 tree tem;
6187 rtx last_ptr = NULL_RTX;
6189 /* Make sure volatile mem refs aren't considered
6190 valid operands of arithmetic insns. */
6191 init_recog_no_volatile ();
6193 /* Set this before generating any memory accesses. */
6194 current_function_check_memory_usage
6195 = (flag_check_memory_usage
6196 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6198 current_function_instrument_entry_exit
6199 = (flag_instrument_function_entry_exit
6200 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6202 current_function_limit_stack
6203 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6205 /* If function gets a static chain arg, store it in the stack frame.
6206 Do this first, so it gets the first stack slot offset. */
6207 if (current_function_needs_context)
6208 {
6209 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6211 /* Delay copying static chain if it is not a register to avoid
6212 conflicts with regs used for parameters. */
6213 if (! SMALL_REGISTER_CLASSES
6214 || GET_CODE (static_chain_incoming_rtx) == REG)
6215 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6216 }
6218 /* If the parameters of this function need cleaning up, get a label
6219 for the beginning of the code which executes those cleanups. This must
6220 be done before doing anything with return_label. */
6221 if (parms_have_cleanups)
6222 cleanup_label = gen_label_rtx ();
6223 else
6224 cleanup_label = 0;
6226 /* Make the label for return statements to jump to, if this machine
6227 does not have a one-instruction return and uses an epilogue,
6228 or if it returns a structure, or if it has parm cleanups. */
6229 #ifdef HAVE_return
6230 if (cleanup_label == 0 && HAVE_return
6231 && ! current_function_instrument_entry_exit
6232 && ! current_function_returns_pcc_struct
6233 && ! (current_function_returns_struct && ! optimize))
6234 return_label = 0;
6235 else
6236 return_label = gen_label_rtx ();
6237 #else
6238 return_label = gen_label_rtx ();
6239 #endif
6241 /* Initialize rtx used to return the value. */
6242 /* Do this before assign_parms so that we copy the struct value address
6243 before any library calls that assign parms might generate. */
6245 /* Decide whether to return the value in memory or in a register. */
6246 if (aggregate_value_p (DECL_RESULT (subr)))
6247 {
6248 /* Returning something that won't go in a register. */
6249 register rtx value_address = 0;
6251 #ifdef PCC_STATIC_STRUCT_RETURN
6252 if (current_function_returns_pcc_struct)
6253 {
6254 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6255 value_address = assemble_static_space (size);
6256 }
6257 else
6258 #endif
6259 {
6260 /* Expect to be passed the address of a place to store the value.
6261 If it is passed as an argument, assign_parms will take care of
6262 it. */
6263 if (struct_value_incoming_rtx)
6264 {
6265 value_address = gen_reg_rtx (Pmode);
6266 emit_move_insn (value_address, struct_value_incoming_rtx);
6267 }
6268 }
6269 if (value_address)
6270 {
6271 DECL_RTL (DECL_RESULT (subr))
6272 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6273 set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
6274 DECL_RESULT (subr), 1);
6275 }
6276 }
6277 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6278 /* If return mode is void, this decl rtl should not be used. */
6279 DECL_RTL (DECL_RESULT (subr)) = 0;
6280 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6281 {
6282 /* If function will end with cleanup code for parms,
6283 compute the return values into a pseudo reg,
6284 which we will copy into the true return register
6285 after the cleanups are done. */
6287 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6289 #ifdef PROMOTE_FUNCTION_RETURN
6290 tree type = TREE_TYPE (DECL_RESULT (subr));
6291 int unsignedp = TREE_UNSIGNED (type);
6293 mode = promote_mode (type, mode, &unsignedp, 1);
6294 #endif
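/* E.g. on a target whose PROMOTE_FUNCTION_RETURN widens sub-word values
   (hypothetical), a `short' result is computed in an SImode pseudo here
   rather than an HImode one.  */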
6296 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6297 }
6298 else
6299 /* Scalar, returned in a register. */
6300 {
6301 DECL_RTL (DECL_RESULT (subr))
6302 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)), subr, 1);
6304 /* Mark this reg as the function's return value. */
6305 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6306 {
6307 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6308 /* Needed because we may need to move this to memory
6309 in case it's a named return value whose address is taken. */
6310 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6311 }
6312 }
6314 /* Initialize rtx for parameters and local variables.
6315 In some cases this requires emitting insns. */
6317 assign_parms (subr);
6319 /* Copy the static chain now if it wasn't a register. The delay is to
6320 avoid conflicts with the parameter passing registers. */
6322 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6323 if (GET_CODE (static_chain_incoming_rtx) != REG)
6324 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6326 /* The following was moved from init_function_start.
6327 The move is supposed to make sdb output more accurate. */
6328 /* Indicate the beginning of the function body,
6329 as opposed to parm setup. */
6330 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6332 if (GET_CODE (get_last_insn ()) != NOTE)
6333 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6334 parm_birth_insn = get_last_insn ();
6336 context_display = 0;
6337 if (current_function_needs_context)
6338 {
6339 /* Fetch static chain values for containing functions. */
6340 tem = decl_function_context (current_function_decl);
6341 /* Copy the static chain pointer into a pseudo. If we have
6342 small register classes, copy the value from memory if
6343 static_chain_incoming_rtx is a REG. */
6344 if (tem)
6345 {
6346 /* If the static chain originally came in a register, put it back
6347 there, then move it out in the next insn. The reason for
6348 this peculiar code is to satisfy function integration. */
6349 if (SMALL_REGISTER_CLASSES
6350 && GET_CODE (static_chain_incoming_rtx) == REG)
6351 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6352 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6353 }
6355 while (tem)
6356 {
6357 tree rtlexp = make_node (RTL_EXPR);
6359 RTL_EXPR_RTL (rtlexp) = last_ptr;
6360 context_display = tree_cons (tem, rtlexp, context_display);
6361 tem = decl_function_context (tem);
6362 if (tem == 0)
6363 break;
6364 /* Chain thru stack frames, assuming pointer to next lexical frame
6365 is found at the place we always store it. */
6366 #ifdef FRAME_GROWS_DOWNWARD
6367 last_ptr = plus_constant (last_ptr,
6368 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6369 #endif
6370 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6371 MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
6372 last_ptr = copy_to_reg (last_ptr);
6374 /* If we are not optimizing, ensure that we know that this
6375 piece of context is live over the entire function. */
6376 if (! optimize)
6377 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6378 save_expr_regs);
6379 }
6380 }
6382 if (current_function_instrument_entry_exit)
6383 {
6384 rtx fun = DECL_RTL (current_function_decl);
6385 if (GET_CODE (fun) == MEM)
6386 fun = XEXP (fun, 0);
6387 else
6388 abort ();
6389 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6390 fun, Pmode,
6391 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6392 0,
6393 hard_frame_pointer_rtx),
6394 Pmode);
6395 }
6397 /* After the display initializations is where the tail-recursion label
6398 should go, if we end up needing one. Ensure we have a NOTE here
6399 since some things (like trampolines) get placed before this. */
6400 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6402 /* Evaluate now the sizes of any types declared among the arguments. */
6403 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6404 {
6405 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6406 EXPAND_MEMORY_USE_BAD);
6407 /* Flush the queue in case this parameter declaration has
6408 side-effects. */
6409 emit_queue ();
6410 }
6412 /* Make sure there is a line number after the function entry setup code. */
6413 force_next_line_note ();
6414 }
6416 /* Undo the effects of init_dummy_function_start. */
6417 void
6418 expand_dummy_function_end ()
6419 {
6420 /* End any sequences that failed to be closed due to syntax errors. */
6421 while (in_sequence_p ())
6422 end_sequence ();
6424 /* Outside function body, can't compute type's actual size
6425 until next function's body starts. */
6427 free_after_parsing (cfun);
6428 free_after_compilation (cfun);
6429 free (cfun);
6430 cfun = 0;
6431 }
6433 /* Call DOIT for each hard register used as a return value from
6434 the current function. */
6436 void
6437 diddle_return_value (doit, arg)
6438 void (*doit) PARAMS ((rtx, void *));
6439 void *arg;
6440 {
6441 rtx outgoing = current_function_return_rtx;
6442 int pcc;
6444 if (! outgoing)
6445 return;
6447 pcc = (current_function_returns_struct
6448 || current_function_returns_pcc_struct);
6450 if ((GET_CODE (outgoing) == REG
6451 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6452 || pcc)
6453 {
6454 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6456 /* A PCC-style return returns a pointer to the memory in which
6457 the structure is stored. */
6458 if (pcc)
6459 type = build_pointer_type (type);
6461 #ifdef FUNCTION_OUTGOING_VALUE
6462 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6463 #else
6464 outgoing = FUNCTION_VALUE (type, current_function_decl);
6465 #endif
6466 /* If this is a BLKmode structure being returned in registers, then use
6467 the mode computed in expand_return. */
6468 if (GET_MODE (outgoing) == BLKmode)
6469 PUT_MODE (outgoing, GET_MODE (current_function_return_rtx));
6470 REG_FUNCTION_VALUE_P (outgoing) = 1;
6471 }
6473 if (GET_CODE (outgoing) == REG)
6474 (*doit) (outgoing, arg);
6475 else if (GET_CODE (outgoing) == PARALLEL)
6476 {
6477 int i;
6479 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6480 {
6481 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6483 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6484 (*doit) (x, arg);
6485 }
6486 }
6487 }
6489 static void
6490 do_clobber_return_reg (reg, arg)
6491 rtx reg;
6492 void *arg ATTRIBUTE_UNUSED;
6493 {
6494 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6495 }
6497 void
6498 clobber_return_register ()
6499 {
6500 diddle_return_value (do_clobber_return_reg, NULL);
6501 }
6503 static void
6504 do_use_return_reg (reg, arg)
6505 rtx reg;
6506 void *arg ATTRIBUTE_UNUSED;
6507 {
6508 emit_insn (gen_rtx_USE (VOIDmode, reg));
6509 }
6511 void
6512 use_return_register ()
6513 {
6514 diddle_return_value (do_use_return_reg, NULL);
6515 }
6517 /* Generate RTL for the end of the current function.
6518 FILENAME and LINE are the current position in the source file.
6520 It is up to language-specific callers to do cleanups for parameters--
6521 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6523 void
6524 expand_function_end (filename, line, end_bindings)
6525 const char *filename;
6526 int line;
6527 int end_bindings;
6528 {
6529 tree link;
6531 #ifdef TRAMPOLINE_TEMPLATE
6532 static rtx initial_trampoline;
6533 #endif
6535 finish_expr_for_function ();
6537 #ifdef NON_SAVING_SETJMP
6538 /* Don't put any variables in registers if we call setjmp
6539 on a machine that fails to restore the registers. */
6540 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6541 {
6542 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6543 setjmp_protect (DECL_INITIAL (current_function_decl));
6545 setjmp_protect_args ();
6546 }
6547 #endif
6549 /* Save the argument pointer if a save area was made for it. */
6550 if (arg_pointer_save_area)
6551 {
6552 /* arg_pointer_save_area may not be a valid memory address, so we
6553 have to check it and fix it if necessary. */
6554 rtx seq;
6555 start_sequence ();
6556 emit_move_insn (validize_mem (arg_pointer_save_area),
6557 virtual_incoming_args_rtx);
6558 seq = gen_sequence ();
6559 end_sequence ();
6560 emit_insn_before (seq, tail_recursion_reentry);
6561 }
6563 /* Initialize any trampolines required by this function. */
6564 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6565 {
6566 tree function = TREE_PURPOSE (link);
6567 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6568 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6569 #ifdef TRAMPOLINE_TEMPLATE
6570 rtx blktramp;
6571 #endif
6572 rtx seq;
6574 #ifdef TRAMPOLINE_TEMPLATE
6575 /* First make sure this compilation has a template for
6576 initializing trampolines. */
6577 if (initial_trampoline == 0)
6578 {
6579 initial_trampoline
6580 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6582 ggc_add_rtx_root (&initial_trampoline, 1);
6583 }
6584 #endif
6586 /* Generate insns to initialize the trampoline. */
6587 start_sequence ();
6588 tramp = round_trampoline_addr (XEXP (tramp, 0));
6589 #ifdef TRAMPOLINE_TEMPLATE
6590 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6591 emit_block_move (blktramp, initial_trampoline,
6592 GEN_INT (TRAMPOLINE_SIZE),
6593 TRAMPOLINE_ALIGNMENT);
6594 #endif
6595 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6596 seq = get_insns ();
6597 end_sequence ();
6599 /* Put those insns at entry to the containing function (this one). */
6600 emit_insns_before (seq, tail_recursion_reentry);
6601 }
6603 /* If we are doing stack checking and this function makes calls,
6604 do a stack probe at the start of the function to ensure we have enough
6605 space for another stack frame. */
6606 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6607 {
6608 rtx insn, seq;
6610 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6611 if (GET_CODE (insn) == CALL_INSN)
6612 {
6613 start_sequence ();
6614 probe_stack_range (STACK_CHECK_PROTECT,
6615 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6616 seq = get_insns ();
6617 end_sequence ();
6618 emit_insns_before (seq, tail_recursion_reentry);
6619 break;
6620 }
6621 }
6623 /* Warn about unused parms if extra warnings were specified. */
6624 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6625 warning. WARN_UNUSED_PARAMETER is negative when set by
6626 -Wunused. */
6627 if (warn_unused_parameter > 0
6628 || (warn_unused_parameter < 0 && extra_warnings))
6629 {
6630 tree decl;
6632 for (decl = DECL_ARGUMENTS (current_function_decl);
6633 decl; decl = TREE_CHAIN (decl))
6634 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6635 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6636 warning_with_decl (decl, "unused parameter `%s'");
6637 }
6639 /* Delete handlers for nonlocal gotos if nothing uses them. */
6640 if (nonlocal_goto_handler_slots != 0
6641 && ! current_function_has_nonlocal_label)
6642 delete_handlers ();
6644 /* End any sequences that failed to be closed due to syntax errors. */
6645 while (in_sequence_p ())
6646 end_sequence ();
6648 /* Outside function body, can't compute type's actual size
6649 until next function's body starts. */
6650 immediate_size_expand--;
6652 clear_pending_stack_adjust ();
6653 do_pending_stack_adjust ();
6655 /* Mark the end of the function body.
6656 If control reaches this insn, the function can drop through
6657 without returning a value. */
6658 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6660 /* Must mark the last line number note in the function, so that the test
6661 coverage code can avoid counting the last line twice. This just tells
6662 the code to ignore the immediately following line note, since there
6663 already exists a copy of this note somewhere above. This line number
6664 note is still needed for debugging though, so we can't delete it. */
6665 if (flag_test_coverage)
6666 emit_note (NULL_PTR, NOTE_INSN_REPEATED_LINE_NUMBER);
6668 /* Output a linenumber for the end of the function.
6669 SDB depends on this. */
6670 emit_line_note_force (filename, line);
6672 /* Output the label for the actual return from the function,
6673 if one is expected. This happens either because a function epilogue
6674 is used instead of a return instruction, or because a return was done
6675 with a goto in order to run local cleanups, or because of pcc-style
6676 structure returning. */
6678 if (return_label)
6679 {
6680 rtx before, after;
6682 /* Before the return label, clobber the return registers so that
6683 they are not propagated live to the rest of the function. This
6684 can only happen with functions that drop through; if there had
6685 been a return statement, there would have either been a return
6686 rtx, or a jump to the return label. */
6688 before = get_last_insn ();
6689 clobber_return_register ();
6690 after = get_last_insn ();
6692 if (before != after)
6693 cfun->x_clobber_return_insn = after;
6695 emit_label (return_label);
6696 }
6698 /* C++ uses this. */
6699 if (end_bindings)
6700 expand_end_bindings (0, 0, 0);
6702 /* Now handle any leftover exception regions that may have been
6703 created for the parameters. */
6704 {
6705 rtx last = get_last_insn ();
6706 rtx label;
6708 expand_leftover_cleanups ();
6710 /* If there are any catch_clauses remaining, output them now. */
6711 emit_insns (catch_clauses);
6712 catch_clauses = catch_clauses_last = NULL_RTX;
6713 /* If the above emitted any code, make sure we jump around it. */
6714 if (last != get_last_insn ())
6715 {
6716 label = gen_label_rtx ();
6717 last = emit_jump_insn_after (gen_jump (label), last);
6718 last = emit_barrier_after (last);
6719 emit_label (label);
6720 }
6721 }
6723 if (current_function_instrument_entry_exit)
6724 {
6725 rtx fun = DECL_RTL (current_function_decl);
6726 if (GET_CODE (fun) == MEM)
6727 fun = XEXP (fun, 0);
6728 else
6729 abort ();
6730 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6731 fun, Pmode,
6732 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6733 0,
6734 hard_frame_pointer_rtx),
6735 Pmode);
6736 }
6738 /* If we had calls to alloca, and this machine needs
6739 an accurate stack pointer to exit the function,
6740 insert some code to save and restore the stack pointer. */
6741 #ifdef EXIT_IGNORE_STACK
6742 if (! EXIT_IGNORE_STACK)
6743 #endif
6744 if (current_function_calls_alloca)
6745 {
6746 rtx tem = 0;
6748 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6749 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6750 }
6752 /* If scalar return value was computed in a pseudo-reg, or was a named
6753 return value that got dumped to the stack, copy that to the hard
6754 return register. */
6755 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0)
6756 {
6757 tree decl_result = DECL_RESULT (current_function_decl);
6758 rtx decl_rtl = DECL_RTL (decl_result);
6760 if (REG_P (decl_rtl)
6761 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6762 : DECL_REGISTER (decl_result))
6763 {
6764 rtx real_decl_rtl;
6766 #ifdef FUNCTION_OUTGOING_VALUE
6767 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6768 current_function_decl);
6769 #else
6770 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6771 current_function_decl);
6772 #endif
6773 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6775 /* If this is a BLKmode structure being returned in registers,
6776 then use the mode computed in expand_return. Note that if
6777 decl_rtl is memory, then its mode may have been changed,
6778 but that current_function_return_rtx has not. */
6779 if (GET_MODE (real_decl_rtl) == BLKmode)
6780 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6782 /* If a named return value dumped decl_return to memory, then
6783 we may need to re-do the PROMOTE_MODE signed/unsigned
6784 extension. */
6785 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6786 {
6787 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6789 #ifdef PROMOTE_FUNCTION_RETURN
6790 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6791 &unsignedp, 1);
6792 #endif
6794 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6795 }
6796 else
6797 emit_move_insn (real_decl_rtl, decl_rtl);
6799 /* The delay slot scheduler assumes that current_function_return_rtx
6800 holds the hard register containing the return value, not a
6801 temporary pseudo. */
6802 current_function_return_rtx = real_decl_rtl;
6803 }
6804 }
6806 /* If returning a structure, arrange to return the address of the value
6807 in a place where debuggers expect to find it.
6809 If returning a structure PCC style,
6810 the caller also depends on this value.
6811 And current_function_returns_pcc_struct is not necessarily set. */
6812 if (current_function_returns_struct
6813 || current_function_returns_pcc_struct)
6814 {
6815 rtx value_address =
6816 XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6817 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6818 #ifdef FUNCTION_OUTGOING_VALUE
6819 rtx outgoing
6820 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6821 current_function_decl);
6822 #else
6823 rtx outgoing
6824 = FUNCTION_VALUE (build_pointer_type (type),
6825 current_function_decl);
6826 #endif
6828 /* Mark this as a function return value so integrate will delete the
6829 assignment and USE below when inlining this function. */
6830 REG_FUNCTION_VALUE_P (outgoing) = 1;
6832 emit_move_insn (outgoing, value_address);
6833 }
6835 /* ??? This should no longer be necessary since stupid is no longer with
6836 us, but there are some parts of the compiler (eg reload_combine, and
6837 sh mach_dep_reorg) that still try to compute their own lifetime info
6838 instead of using the general framework. */
6839 use_return_register ();
6841 /* If this is an implementation of __throw, do what's necessary to
6842 communicate between __builtin_eh_return and the epilogue. */
6843 expand_eh_return ();
6845 /* Output a return insn if we are using one.
6846 Otherwise, let the rtl chain end here, to drop through
6847 into the epilogue. */
6849 #ifdef HAVE_return
6850 if (HAVE_return)
6851 {
6852 emit_jump_insn (gen_return ());
6853 emit_barrier ();
6854 }
6855 #endif
6857 /* Fix up any gotos that jumped out to the outermost
6858 binding level of the function.
6859 Must follow emitting RETURN_LABEL. */
6861 /* If you have any cleanups to do at this point,
6862 and they need to create temporary variables,
6863 then you will lose. */
6864 expand_fixups (get_insns ());
6865 }
6867 /* Extend a vector that records the INSN_UIDs of INSNS (either a
6868 sequence or a single insn). */
6870 static void
6871 record_insns (insns, vecp)
6872 rtx insns;
6873 varray_type *vecp;
6874 {
6875 if (GET_CODE (insns) == SEQUENCE)
6876 {
6877 int len = XVECLEN (insns, 0);
6878 int i = VARRAY_SIZE (*vecp);
6880 VARRAY_GROW (*vecp, i + len);
6881 while (--len >= 0)
6882 {
6883 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
6884 ++i;
6885 }
6886 }
6887 else
6888 {
6889 int i = VARRAY_SIZE (*vecp);
6890 VARRAY_GROW (*vecp, i + 1);
6891 VARRAY_INT (*vecp, i) = INSN_UID (insns);
6892 }
6893 }
6895 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6897 static int
6898 contains (insn, vec)
6899 rtx insn;
6900 varray_type vec;
6901 {
6902 register int i, j;
6904 if (GET_CODE (insn) == INSN
6905 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6906 {
6907 int count = 0;
6908 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6909 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6910 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
6911 count++;
6912 return count;
6913 }
6914 else
6915 {
6916 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6917 if (INSN_UID (insn) == VARRAY_INT (vec, j))
6918 return 1;
6919 }
6920 return 0;
6921 }
6923 int
6924 prologue_epilogue_contains (insn)
6925 rtx insn;
6926 {
6927 if (contains (insn, prologue))
6928 return 1;
6929 if (contains (insn, epilogue))
6930 return 1;
6931 return 0;
6932 }
6934 int
6935 sibcall_epilogue_contains (insn)
6936 rtx insn;
6937 {
6938 if (sibcall_epilogue)
6939 return contains (insn, sibcall_epilogue);
6940 return 0;
6941 }
6943 #ifdef HAVE_return
6944 /* Insert gen_return at the end of block BB. This also means updating
6945 block_for_insn appropriately. */
6947 static void
6948 emit_return_into_block (bb, line_note)
6949 basic_block bb;
6950 rtx line_note;
6951 {
6952 rtx p, end;
6954 p = NEXT_INSN (bb->end);
6955 end = emit_jump_insn_after (gen_return (), bb->end);
6956 if (line_note)
6957 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
6958 NOTE_LINE_NUMBER (line_note), bb->end);
6960 while (1)
6961 {
6962 set_block_for_insn (p, bb);
6963 if (p == bb->end)
6964 break;
6965 p = PREV_INSN (p);
6966 }
6967 bb->end = end;
6968 }
6969 #endif /* HAVE_return */
6971 #ifdef HAVE_epilogue
6973 /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes
6974 no modifications to the stack pointer. */
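/* Roughly (hypothetical rtl): given an epilogue of
     (set (reg sp) (reg fp))
     (set (reg r0) (mem (plus (reg sp) (const_int 4))))
   the first insn becomes a deleted note and the load is rewritten to
   use (mem (plus (reg fp) (const_int 4))).  */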
6976 static void
6977 keep_stack_depressed (seq)
6978 rtx seq;
6979 {
6980 int i;
6981 rtx sp_from_reg = 0;
6982 int sp_modified_unknown = 0;
6984 /* If the epilogue is just a single instruction, it's OK as is. */
6986 if (GET_CODE (seq) != SEQUENCE) return;
6988 /* Scan all insns in SEQ looking for ones that modified the stack
6989 pointer. Record if it modified the stack pointer by copying it
6990 from the frame pointer or if it modified it in some other way.
6991 Then modify any subsequent stack pointer references to take that
6992 into account. We start by only allowing SP to be copied from a
6993 register (presumably FP) and then be subsequently referenced. */
6995 for (i = 0; i < XVECLEN (seq, 0); i++)
6996 {
6997 rtx insn = XVECEXP (seq, 0, i);
6999 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7000 continue;
7002 if (reg_set_p (stack_pointer_rtx, insn))
7003 {
7004 rtx set = single_set (insn);
7006 /* If SP is set as a side-effect, we can't support this. */
7007 if (set == 0)
7008 abort ();
7010 if (GET_CODE (SET_SRC (set)) == REG)
7011 sp_from_reg = SET_SRC (set);
7012 else
7013 sp_modified_unknown = 1;
7015 /* Don't allow the SP modification to happen. */
7016 PUT_CODE (insn, NOTE);
7017 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7018 NOTE_SOURCE_FILE (insn) = 0;
7019 }
7020 else if (reg_referenced_p (stack_pointer_rtx, PATTERN (insn)))
7021 {
7022 if (sp_modified_unknown)
7023 abort ();
7025 else if (sp_from_reg != 0)
7026 PATTERN (insn)
7027 = replace_rtx (PATTERN (insn), stack_pointer_rtx, sp_from_reg);
7028 }
7029 }
7030 }
7031 #endif
7033 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7034 this into place with notes indicating where the prologue ends and where
7035 the epilogue begins. Update the basic block information when possible. */
7037 void
7038 thread_prologue_and_epilogue_insns (f)
7039 rtx f ATTRIBUTE_UNUSED;
7040 {
7041 int inserted = 0;
7042 edge e;
7043 rtx seq;
7044 #ifdef HAVE_prologue
7045 rtx prologue_end = NULL_RTX;
7046 #endif
7047 #if defined (HAVE_epilogue) || defined(HAVE_return)
7048 rtx epilogue_end = NULL_RTX;
7049 #endif
7051 #ifdef HAVE_prologue
7052 if (HAVE_prologue)
7053 {
7054 start_sequence ();
7055 seq = gen_prologue ();
7056 emit_insn (seq);
7058 /* Retain a map of the prologue insns. */
7059 if (GET_CODE (seq) != SEQUENCE)
7060 seq = get_insns ();
7061 record_insns (seq, &prologue);
7062 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7064 seq = gen_sequence ();
7065 end_sequence ();
7067 /* If optimization is off, and perhaps in an empty function,
7068 the entry block will have no successors. */
7069 if (ENTRY_BLOCK_PTR->succ)
7070 {
7071 /* Can't deal with multiple successors of the entry block. */
7072 if (ENTRY_BLOCK_PTR->succ->succ_next)
7073 abort ();
7075 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7076 inserted = 1;
7077 }
7078 else
7079 emit_insn_after (seq, f);
7080 }
7081 #endif
7083 /* If the exit block has no non-fake predecessors, we don't need
7084 an epilogue. */
7085 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7086 if ((e->flags & EDGE_FAKE) == 0)
7087 break;
7088 if (e == NULL)
7089 goto epilogue_done;
7091 #ifdef HAVE_return
7092 if (optimize && HAVE_return)
7093 {
7094 /* If we're allowed to generate a simple return instruction,
7095 then by definition we don't need a full epilogue. Examine
7096 the block that falls through to EXIT. If it does not
7097 contain any code, examine its predecessors and try to
7098 emit (conditional) return instructions. */
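/* E.g. (illustrative) a predecessor ending in
     (jump_insn (set (pc) (label_ref L)))
   where L heads the empty exit-fallthru block can be rewritten as
   (jump_insn (return)) and its edge redirected to EXIT_BLOCK_PTR.  */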
7100 basic_block last;
7101 edge e_next;
7102 rtx label;
7104 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7105 if (e->flags & EDGE_FALLTHRU)
7106 break;
7107 if (e == NULL)
7108 goto epilogue_done;
7109 last = e->src;
7111 /* Verify that there are no active instructions in the last block. */
7112 label = last->end;
7113 while (label && GET_CODE (label) != CODE_LABEL)
7114 {
7115 if (active_insn_p (label))
7116 break;
7117 label = PREV_INSN (label);
7118 }
7120 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7121 {
7122 rtx epilogue_line_note = NULL_RTX;
7124 /* Locate the line number associated with the closing brace,
7125 if we can find one. */
7126 for (seq = get_last_insn ();
7127 seq && ! active_insn_p (seq);
7128 seq = PREV_INSN (seq))
7129 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7130 {
7131 epilogue_line_note = seq;
7132 break;
7133 }
7135 for (e = last->pred; e; e = e_next)
7136 {
7137 basic_block bb = e->src;
7138 rtx jump;
7140 e_next = e->pred_next;
7141 if (bb == ENTRY_BLOCK_PTR)
7142 continue;
7144 jump = bb->end;
7145 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7146 continue;
7148 /* If we have an unconditional jump, we can replace that
7149 with a simple return instruction. */
7150 if (simplejump_p (jump))
7151 {
7152 emit_return_into_block (bb, epilogue_line_note);
7153 flow_delete_insn (jump);
7154 }
7156 /* If we have a conditional jump, we can try to replace
7157 that with a conditional return instruction. */
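/* I.e. (illustrative) the (label_ref L) arm of
     (set (pc) (if_then_else (cond) (label_ref L) (pc)))
   is swapped for (return), provided validate_change accepts the
   resulting insn on this target.  */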
7158 else if (condjump_p (jump))
7159 {
7160 rtx ret, *loc;
7162 ret = SET_SRC (PATTERN (jump));
7163 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7164 loc = &XEXP (ret, 1);
7165 else
7166 loc = &XEXP (ret, 2);
7167 ret = gen_rtx_RETURN (VOIDmode);
7169 if (! validate_change (jump, loc, ret, 0))
7170 continue;
7171 if (JUMP_LABEL (jump))
7172 LABEL_NUSES (JUMP_LABEL (jump))--;
7174 /* If this block has only one successor, it both jumps
7175 and falls through to the fallthru block, so we can't
7176 delete the edge. */
7177 if (bb->succ->succ_next == NULL)
7178 continue;
7179 }
7180 else
7181 continue;
7183 /* Fix up the CFG for the successful change we just made. */
7184 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7185 }
7187 /* Emit a return insn for the exit fallthru block. Whether
7188 this is still reachable will be determined later. */
7190 emit_barrier_after (last->end);
7191 emit_return_into_block (last, epilogue_line_note);
7192 epilogue_end = last->end;
7193 goto epilogue_done;
7194 }
7195 }
7196 #endif
7197 #ifdef HAVE_epilogue
7198 if (HAVE_epilogue)
7199 {
7200 /* Find the edge that falls through to EXIT. Other edges may exist
7201 due to RETURN instructions, but those don't need epilogues.
7202 There really shouldn't be a mixture -- either all should have
7203 been converted or none, however... */
7205 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7206 if (e->flags & EDGE_FALLTHRU)
7207 break;
7208 if (e == NULL)
7209 goto epilogue_done;
7211 start_sequence ();
7212 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7214 seq = gen_epilogue ();
7216 /* If this function returns with the stack depressed, massage
7217 the epilogue to actually do that. */
7218 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7219 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7220 keep_stack_depressed (seq);
7222 emit_jump_insn (seq);
7224 /* Retain a map of the epilogue insns. */
7225 if (GET_CODE (seq) != SEQUENCE)
7226 seq = get_insns ();
7227 record_insns (seq, &epilogue);
7229 seq = gen_sequence ();
7230 end_sequence ();
7232 insert_insn_on_edge (seq, e);
7233 inserted = 1;
7234 }
7235 #endif
7236 epilogue_done:
7238 if (inserted)
7239 commit_edge_insertions ();
7241 #ifdef HAVE_sibcall_epilogue
7242 /* Emit sibling epilogues before any sibling call sites. */
7243 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7244 {
7245 basic_block bb = e->src;
7246 rtx insn = bb->end;
7247 rtx i;
7248 rtx newinsn;
7250 if (GET_CODE (insn) != CALL_INSN
7251 || ! SIBLING_CALL_P (insn))
7252 continue;
7254 start_sequence ();
7255 seq = gen_sibcall_epilogue ();
7256 end_sequence ();
7258 i = PREV_INSN (insn);
7259 newinsn = emit_insn_before (seq, insn);
7261 /* Update the UID to basic block map. */
7262 for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
7263 set_block_for_insn (i, bb);
7265 /* Retain a map of the epilogue insns. Used in life analysis to
7266 avoid getting rid of sibcall epilogue insns. */
7267 record_insns (GET_CODE (seq) == SEQUENCE
7268 ? seq : newinsn, &sibcall_epilogue);
7269 }
7270 #endif
7272 #ifdef HAVE_prologue
7273 if (prologue_end)
7274 {
7275 rtx insn, prev;
7277 /* GDB handles `break f' by setting a breakpoint on the first
7278 line note after the prologue. Which means (1) that if
7279 there are line number notes before where we inserted the
7280 prologue we should move them, and (2) we should generate a
7281 note before the end of the first basic block, if there isn't
7282 one already there. */
7284 for (insn = prologue_end; insn; insn = prev)
7285 {
7286 prev = PREV_INSN (insn);
7287 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7288 {
7289 /* Note that we cannot reorder the first insn in the
7290 chain, since rest_of_compilation relies on that
7291 remaining constant. */
7292 if (prev == NULL)
7293 break;
7294 reorder_insns (insn, insn, prologue_end);
7295 }
7296 }
7298 /* Find the last line number note in the first block. */
7299 for (insn = BASIC_BLOCK (0)->end;
7300 insn != prologue_end;
7301 insn = PREV_INSN (insn))
7302 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7303 break;
7305 /* If we didn't find one, make a copy of the first line number
7306 we run across. */
7307 if (! insn)
7308 {
7309 for (insn = next_active_insn (prologue_end);
7310 insn;
7311 insn = PREV_INSN (insn))
7312 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7313 {
7314 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7315 NOTE_LINE_NUMBER (insn),
7316 prologue_end);
7317 break;
7318 }
7319 }
7320 }
7321 #endif
7322 #ifdef HAVE_epilogue
7323 if (epilogue_end)
7324 {
7325 rtx insn, next;
7327 /* Similarly, move any line notes that appear after the epilogue.
7328 There is no need, however, to be quite so anal about the existence
7329 of such a note. */
7330 for (insn = epilogue_end; insn; insn = next)
7331 {
7332 next = NEXT_INSN (insn);
7333 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7334 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7335 }
7336 }
7337 #endif
7338 }
7340 /* Reposition the prologue-end and epilogue-begin notes after instruction
7341 scheduling and delayed branch scheduling. */
7343 void
7344 reposition_prologue_and_epilogue_notes (f)
7345 rtx f ATTRIBUTE_UNUSED;
7346 {
7347 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7348 int len;
7350 if ((len = VARRAY_SIZE (prologue)) > 0)
7351 {
7352 register rtx insn, note = 0;
7354 /* Scan from the beginning until we reach the last prologue insn.
7355 We apparently can't depend on basic_block_{head,end} after
7356 reorg has run. */
7357 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7358 {
7359 if (GET_CODE (insn) == NOTE)
7360 {
7361 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7362 note = insn;
7363 }
7364 else if ((len -= contains (insn, prologue)) == 0)
7365 {
7366 rtx next;
7367 /* Find the prologue-end note if we haven't already, and
7368 move it to just after the last prologue insn. */
7369 if (note == 0)
7370 {
7371 for (note = insn; (note = NEXT_INSN (note));)
7372 if (GET_CODE (note) == NOTE
7373 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7374 break;
7375 }
7377 next = NEXT_INSN (note);
7379 /* Whether or not we can depend on BLOCK_HEAD,
7380 attempt to keep it up-to-date. */
7381 if (BLOCK_HEAD (0) == note)
7382 BLOCK_HEAD (0) = next;
7384 remove_insn (note);
7385 add_insn_after (note, insn);
7386 }
7387 }
7388 }
7390 if ((len = VARRAY_SIZE (epilogue)) > 0)
7391 {
7392 register rtx insn, note = 0;
7394 /* Scan from the end until we reach the first epilogue insn.
7395 We apparently can't depend on basic_block_{head,end} after
7396 reorg has run. */
7397 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7398 {
7399 if (GET_CODE (insn) == NOTE)
7400 {
7401 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7402 note = insn;
7403 }
7404 else if ((len -= contains (insn, epilogue)) == 0)
7405 {
7406 /* Find the epilogue-begin note if we haven't already, and
7407 move it to just before the first epilogue insn. */
7408 if (note == 0)
7409 {
7410 for (note = insn; (note = PREV_INSN (note));)
7411 if (GET_CODE (note) == NOTE
7412 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7413 break;
7414 }
7416 /* Whether or not we can depend on BLOCK_HEAD,
7417 attempt to keep it up-to-date. */
7418 if (n_basic_blocks
7419 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7420 BLOCK_HEAD (n_basic_blocks-1) = note;
7422 remove_insn (note);
7423 add_insn_before (note, insn);
7424 }
7425 }
7426 }
7427 #endif /* HAVE_prologue or HAVE_epilogue */
7428 }
7430 /* Mark T for GC. */
7432 static void
7433 mark_temp_slot (t)
7434 struct temp_slot *t;
7435 {
7436 while (t)
7437 {
7438 ggc_mark_rtx (t->slot);
7439 ggc_mark_rtx (t->address);
7440 ggc_mark_tree (t->rtl_expr);
7442 t = t->next;
7443 }
7444 }
7446 /* Mark P for GC. */
7448 static void
7449 mark_function_status (p)
7450 struct function *p;
7451 {
7452 int i;
7453 rtx *r;
7455 if (p == 0)
7456 return;
7458 ggc_mark_rtx (p->arg_offset_rtx);
7460 if (p->x_parm_reg_stack_loc)
7461 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7462 i > 0; --i, ++r)
7463 ggc_mark_rtx (*r);
7465 ggc_mark_rtx (p->return_rtx);
7466 ggc_mark_rtx (p->x_cleanup_label);
7467 ggc_mark_rtx (p->x_return_label);
7468 ggc_mark_rtx (p->x_save_expr_regs);
7469 ggc_mark_rtx (p->x_stack_slot_list);
7470 ggc_mark_rtx (p->x_parm_birth_insn);
7471 ggc_mark_rtx (p->x_tail_recursion_label);
7472 ggc_mark_rtx (p->x_tail_recursion_reentry);
7473 ggc_mark_rtx (p->internal_arg_pointer);
7474 ggc_mark_rtx (p->x_arg_pointer_save_area);
7475 ggc_mark_tree (p->x_rtl_expr_chain);
7476 ggc_mark_rtx (p->x_last_parm_insn);
7477 ggc_mark_tree (p->x_context_display);
7478 ggc_mark_tree (p->x_trampoline_list);
7479 ggc_mark_rtx (p->epilogue_delay_list);
7480 ggc_mark_rtx (p->x_clobber_return_insn);
7482 mark_temp_slot (p->x_temp_slots);
7484 {
7485 struct var_refs_queue *q = p->fixup_var_refs_queue;
7486 while (q)
7487 {
7488 ggc_mark_rtx (q->modified);
7489 q = q->next;
7490 }
7491 }
7493 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7494 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7495 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7496 ggc_mark_tree (p->x_nonlocal_labels);
7497 }
7499 /* Mark the function chain ARG (which is really a struct function **)
7500 for GC. */
7502 static void
7503 mark_function_chain (arg)
7504 void *arg;
7505 {
7506 struct function *f = *(struct function **) arg;
7508 for (; f; f = f->next_global)
7509 {
7510 ggc_mark_tree (f->decl);
7512 mark_function_status (f);
7513 mark_eh_status (f->eh);
7514 mark_stmt_status (f->stmt);
7515 mark_expr_status (f->expr);
7516 mark_emit_status (f->emit);
7517 mark_varasm_status (f->varasm);
7519 if (mark_machine_status)
7520 (*mark_machine_status) (f);
7521 if (mark_lang_status)
7522 (*mark_lang_status) (f);
7524 if (f->original_arg_vector)
7525 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7526 if (f->original_decl_initial)
7527 ggc_mark_tree (f->original_decl_initial);
7528 }
7529 }
7531 /* Called once, at initialization, to initialize function.c. */
7533 void
7534 init_function_once ()
7535 {
7536 ggc_add_root (&all_functions, 1, sizeof all_functions,
7537 mark_function_chain);
7539 VARRAY_INT_INIT (prologue, 0, "prologue");
7540 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7541 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7542 }